1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::RepoPath,
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use http_client::Url;
22use itertools::Itertools;
23use language::{
24 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
25 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
26 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainLister,
27 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
28 tree_sitter_rust, tree_sitter_typescript,
29};
30use lsp::{
31 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
32 WillRenameFiles, notification::DidRenameFiles,
33};
34use parking_lot::Mutex;
35use paths::{config_dir, tasks_file};
36use postage::stream::Stream as _;
37use pretty_assertions::{assert_eq, assert_matches};
38use rand::{Rng as _, rngs::StdRng};
39use serde_json::json;
40#[cfg(not(windows))]
41use std::os;
42use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
43use task::{ResolvedTask, TaskContext};
44use unindent::Unindent as _;
45use util::{
46 TryFutureExt as _, assert_set_eq, maybe, path,
47 paths::PathMatcher,
48 test::{TempTree, marked_text_offsets},
49 uri,
50};
51use worktree::WorktreeModelHandle as _;
52
53#[gpui::test]
54async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
55 cx.executor().allow_parking();
56
57 let (tx, mut rx) = futures::channel::mpsc::unbounded();
58 let _thread = std::thread::spawn(move || {
59 #[cfg(not(target_os = "windows"))]
60 std::fs::metadata("/tmp").unwrap();
61 #[cfg(target_os = "windows")]
62 std::fs::metadata("C:/Windows").unwrap();
63 std::thread::sleep(Duration::from_millis(1000));
64 tx.unbounded_send(1).unwrap();
65 });
66 rx.next().await.unwrap();
67}
68
69#[gpui::test]
70async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
71 cx.executor().allow_parking();
72
73 let io_task = smol::unblock(move || {
74 println!("sleeping on thread {:?}", std::thread::current().id());
75 std::thread::sleep(Duration::from_millis(10));
76 1
77 });
78
79 let task = cx.foreground_executor().spawn(async move {
80 io_task.await;
81 });
82
83 task.await;
84}
85
// On Unix, a worktree opened through a symlinked root should scan through
// symlinked directories and report the same inode for both paths to a file
// reachable via a directory symlink.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (RealFs + TempTree), so blocking is expected.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink the whole tree root, and symlink "fennel" to a sibling name
    // ("finnochio") inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, plus "grape" counted via both fennel and the
        // finnochio symlink.
        assert_eq!(tree.file_count(), 5);
        // Both paths resolve to the same underlying file.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
131
// Verifies `.editorconfig` support: root/nested config layering, overriding of
// `.zed/settings.json` for matching globs, `tab_width` as the fallback when
// `indent_size` is unset, and `max_line_length = off` deferring to Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake filesystem backing the project.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings.json for files matching its globs.
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // The .editorconfig in b/ overrides the .editorconfig in the root.
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used instead.
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", fall back to .zed/settings.json's
        // preferred_line_length.
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so the .zed/settings.json tab size applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
230
231#[gpui::test]
232async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
233 init_test(cx);
234 cx.update(|cx| {
235 GitHostingProviderRegistry::default_global(cx);
236 git_hosting_providers::init(cx);
237 });
238
239 let fs = FakeFs::new(cx.executor());
240 let str_path = path!("/dir");
241 let path = Path::new(str_path);
242
243 fs.insert_tree(
244 path!("/dir"),
245 json!({
246 ".zed": {
247 "settings.json": r#"{
248 "git_hosting_providers": [
249 {
250 "provider": "gitlab",
251 "base_url": "https://google.com",
252 "name": "foo"
253 }
254 ]
255 }"#
256 },
257 }),
258 )
259 .await;
260
261 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
262 let (_worktree, _) =
263 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
264 cx.executor().run_until_parked();
265
266 cx.update(|cx| {
267 let provider = GitHostingProviderRegistry::global(cx);
268 assert!(
269 provider
270 .list_hosting_providers()
271 .into_iter()
272 .any(|provider| provider.name() == "foo")
273 );
274 });
275
276 fs.atomic_write(
277 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
278 "{}".into(),
279 )
280 .await
281 .unwrap();
282
283 cx.run_until_parked();
284
285 cx.update(|cx| {
286 let provider = GitHostingProviderRegistry::global(cx);
287 assert!(
288 !provider
289 .list_hosting_providers()
290 .into_iter()
291 .any(|provider| provider.name() == "foo")
292 );
293 });
294}
295
// Verifies worktree-local `.zed` directories: nested settings.json overrides the
// root one, tasks from every `.zed` directory are surfaced, the most recently
// scheduled task is listed first, and global tasks.json entries are appended
// after the worktree-local ones.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution requires at least an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // While we're here, check that the nested b/.zed/settings.json
            // overrides the root's tab_size for files under b/.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree-local tasks are found; nothing has been scheduled yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, and add a global tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task now sorts first; the global task is appended last,
    // with its env carried through resolution.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
504
505#[gpui::test]
506async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
507 init_test(cx);
508 TaskStore::init(None);
509
510 let fs = FakeFs::new(cx.executor());
511 fs.insert_tree(
512 path!("/dir"),
513 json!({
514 ".zed": {
515 "tasks.json": r#"[{
516 "label": "test worktree root",
517 "command": "echo $ZED_WORKTREE_ROOT"
518 }]"#,
519 },
520 "a": {
521 "a.rs": "fn a() {\n A\n}"
522 },
523 }),
524 )
525 .await;
526
527 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
528 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
529
530 cx.executor().run_until_parked();
531 let worktree_id = cx.update(|cx| {
532 project.update(cx, |project, cx| {
533 project.worktrees(cx).next().unwrap().read(cx).id()
534 })
535 });
536
537 let active_non_worktree_item_tasks = cx
538 .update(|cx| {
539 get_all_tasks(
540 &project,
541 Arc::new(TaskContexts {
542 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
543 active_worktree_context: None,
544 other_worktree_contexts: Vec::new(),
545 lsp_task_sources: HashMap::default(),
546 latest_selection: None,
547 }),
548 cx,
549 )
550 })
551 .await;
552 assert!(
553 active_non_worktree_item_tasks.is_empty(),
554 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
555 );
556
557 let active_worktree_tasks = cx
558 .update(|cx| {
559 get_all_tasks(
560 &project,
561 Arc::new(TaskContexts {
562 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
563 active_worktree_context: Some((worktree_id, {
564 let mut worktree_context = TaskContext::default();
565 worktree_context
566 .task_variables
567 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
568 worktree_context
569 })),
570 other_worktree_contexts: Vec::new(),
571 lsp_task_sources: HashMap::default(),
572 latest_selection: None,
573 }),
574 cx,
575 )
576 })
577 .await;
578 assert_eq!(
579 active_worktree_tasks
580 .into_iter()
581 .map(|(source_kind, task)| {
582 let resolved = task.resolved;
583 (source_kind, resolved.command.unwrap())
584 })
585 .collect::<Vec<_>>(),
586 vec![(
587 TaskSourceKind::Worktree {
588 id: worktree_id,
589 directory_in_worktree: PathBuf::from(path!(".zed")),
590 id_base: if cfg!(windows) {
591 "local worktree tasks from directory \".zed\"".into()
592 } else {
593 "local worktree tasks from directory \".zed\"".into()
594 },
595 },
596 "echo /dir".to_string(),
597 )]
598 );
599}
600
// Verifies language-server multiplexing within one worktree: two subprojects
// rooted by separate `pyproject.toml` manifests initially share one server
// instance, and activating a distinct toolchain for one subproject spawns a
// second server instance for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider rooting projects at the nearest `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<Path>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join("pyproject.toml");
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own pyproject.toml and .venv.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Url::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.
    let (available_toolchains_for_b, root_path) = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's pyproject.toml.
    assert_eq!(root_path.as_ref(), Path::new("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // Nothing is active until a toolchain is explicitly chosen.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town: activating a distinct toolchain
    // for project-b spawned a second instance with a fresh server id.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
797
798#[gpui::test]
799async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
800 init_test(cx);
801
802 let fs = FakeFs::new(cx.executor());
803 fs.insert_tree(
804 path!("/dir"),
805 json!({
806 "test.rs": "const A: i32 = 1;",
807 "test2.rs": "",
808 "Cargo.toml": "a = 1",
809 "package.json": "{\"a\": 1}",
810 }),
811 )
812 .await;
813
814 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
815 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
816
817 let mut fake_rust_servers = language_registry.register_fake_lsp(
818 "Rust",
819 FakeLspAdapter {
820 name: "the-rust-language-server",
821 capabilities: lsp::ServerCapabilities {
822 completion_provider: Some(lsp::CompletionOptions {
823 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
824 ..Default::default()
825 }),
826 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
827 lsp::TextDocumentSyncOptions {
828 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
829 ..Default::default()
830 },
831 )),
832 ..Default::default()
833 },
834 ..Default::default()
835 },
836 );
837 let mut fake_json_servers = language_registry.register_fake_lsp(
838 "JSON",
839 FakeLspAdapter {
840 name: "the-json-language-server",
841 capabilities: lsp::ServerCapabilities {
842 completion_provider: Some(lsp::CompletionOptions {
843 trigger_characters: Some(vec![":".to_string()]),
844 ..Default::default()
845 }),
846 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
847 lsp::TextDocumentSyncOptions {
848 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
849 ..Default::default()
850 },
851 )),
852 ..Default::default()
853 },
854 ..Default::default()
855 },
856 );
857
858 // Open a buffer without an associated language server.
859 let (toml_buffer, _handle) = project
860 .update(cx, |project, cx| {
861 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
862 })
863 .await
864 .unwrap();
865
866 // Open a buffer with an associated language server before the language for it has been loaded.
867 let (rust_buffer, _handle2) = project
868 .update(cx, |project, cx| {
869 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
870 })
871 .await
872 .unwrap();
873 rust_buffer.update(cx, |buffer, _| {
874 assert_eq!(buffer.language().map(|l| l.name()), None);
875 });
876
877 // Now we add the languages to the project, and ensure they get assigned to all
878 // the relevant open buffers.
879 language_registry.add(json_lang());
880 language_registry.add(rust_lang());
881 cx.executor().run_until_parked();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
884 });
885
886 // A server is started up, and it is notified about Rust files.
887 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
888 assert_eq!(
889 fake_rust_server
890 .receive_notification::<lsp::notification::DidOpenTextDocument>()
891 .await
892 .text_document,
893 lsp::TextDocumentItem {
894 uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
895 version: 0,
896 text: "const A: i32 = 1;".to_string(),
897 language_id: "rust".to_string(),
898 }
899 );
900
901 // The buffer is configured based on the language server's capabilities.
902 rust_buffer.update(cx, |buffer, _| {
903 assert_eq!(
904 buffer
905 .completion_triggers()
906 .iter()
907 .cloned()
908 .collect::<Vec<_>>(),
909 &[".".to_string(), "::".to_string()]
910 );
911 });
912 toml_buffer.update(cx, |buffer, _| {
913 assert!(buffer.completion_triggers().is_empty());
914 });
915
916 // Edit a buffer. The changes are reported to the language server.
917 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
918 assert_eq!(
919 fake_rust_server
920 .receive_notification::<lsp::notification::DidChangeTextDocument>()
921 .await
922 .text_document,
923 lsp::VersionedTextDocumentIdentifier::new(
924 lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
925 1
926 )
927 );
928
929 // Open a third buffer with a different associated language server.
930 let (json_buffer, _json_handle) = project
931 .update(cx, |project, cx| {
932 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
933 })
934 .await
935 .unwrap();
936
937 // A json language server is started up and is only notified about the json buffer.
938 let mut fake_json_server = fake_json_servers.next().await.unwrap();
939 assert_eq!(
940 fake_json_server
941 .receive_notification::<lsp::notification::DidOpenTextDocument>()
942 .await
943 .text_document,
944 lsp::TextDocumentItem {
945 uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
946 version: 0,
947 text: "{\"a\": 1}".to_string(),
948 language_id: "json".to_string(),
949 }
950 );
951
952 // This buffer is configured based on the second language server's
953 // capabilities.
954 json_buffer.update(cx, |buffer, _| {
955 assert_eq!(
956 buffer
957 .completion_triggers()
958 .iter()
959 .cloned()
960 .collect::<Vec<_>>(),
961 &[":".to_string()]
962 );
963 });
964
965 // When opening another buffer whose language server is already running,
966 // it is also configured based on the existing language server's capabilities.
967 let (rust_buffer2, _handle4) = project
968 .update(cx, |project, cx| {
969 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
970 })
971 .await
972 .unwrap();
973 rust_buffer2.update(cx, |buffer, _| {
974 assert_eq!(
975 buffer
976 .completion_triggers()
977 .iter()
978 .cloned()
979 .collect::<Vec<_>>(),
980 &[".".to_string(), "::".to_string()]
981 );
982 });
983
984 // Changes are reported only to servers matching the buffer's language.
985 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
986 rust_buffer2.update(cx, |buffer, cx| {
987 buffer.edit([(0..0, "let x = 1;")], None, cx)
988 });
989 assert_eq!(
990 fake_rust_server
991 .receive_notification::<lsp::notification::DidChangeTextDocument>()
992 .await
993 .text_document,
994 lsp::VersionedTextDocumentIdentifier::new(
995 lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
996 1
997 )
998 );
999
1000 // Save notifications are reported to all servers.
1001 project
1002 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1003 .await
1004 .unwrap();
1005 assert_eq!(
1006 fake_rust_server
1007 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1008 .await
1009 .text_document,
1010 lsp::TextDocumentIdentifier::new(
1011 lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1012 )
1013 );
1014 assert_eq!(
1015 fake_json_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023
1024 // Renames are reported only to servers matching the buffer's language.
1025 fs.rename(
1026 Path::new(path!("/dir/test2.rs")),
1027 Path::new(path!("/dir/test3.rs")),
1028 Default::default(),
1029 )
1030 .await
1031 .unwrap();
1032 assert_eq!(
1033 fake_rust_server
1034 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1035 .await
1036 .text_document,
1037 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
1038 );
1039 assert_eq!(
1040 fake_rust_server
1041 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1042 .await
1043 .text_document,
1044 lsp::TextDocumentItem {
1045 uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
1046 version: 0,
1047 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1048 language_id: "rust".to_string(),
1049 },
1050 );
1051
1052 rust_buffer2.update(cx, |buffer, cx| {
1053 buffer.update_diagnostics(
1054 LanguageServerId(0),
1055 DiagnosticSet::from_sorted_entries(
1056 vec![DiagnosticEntry {
1057 diagnostic: Default::default(),
1058 range: Anchor::MIN..Anchor::MAX,
1059 }],
1060 &buffer.snapshot(),
1061 ),
1062 cx,
1063 );
1064 assert_eq!(
1065 buffer
1066 .snapshot()
1067 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1068 .count(),
1069 1
1070 );
1071 });
1072
1073 // When the rename changes the extension of the file, the buffer gets closed on the old
1074 // language server and gets opened on the new one.
1075 fs.rename(
1076 Path::new(path!("/dir/test3.rs")),
1077 Path::new(path!("/dir/test3.json")),
1078 Default::default(),
1079 )
1080 .await
1081 .unwrap();
1082 assert_eq!(
1083 fake_rust_server
1084 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1085 .await
1086 .text_document,
1087 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
1088 );
1089 assert_eq!(
1090 fake_json_server
1091 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1092 .await
1093 .text_document,
1094 lsp::TextDocumentItem {
1095 uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
1096 version: 0,
1097 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1098 language_id: "json".to_string(),
1099 },
1100 );
1101
1102 // We clear the diagnostics, since the language has changed.
1103 rust_buffer2.update(cx, |buffer, _| {
1104 assert_eq!(
1105 buffer
1106 .snapshot()
1107 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1108 .count(),
1109 0
1110 );
1111 });
1112
1113 // The renamed file's version resets after changing language server.
1114 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1115 assert_eq!(
1116 fake_json_server
1117 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1118 .await
1119 .text_document,
1120 lsp::VersionedTextDocumentIdentifier::new(
1121 lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
1122 1
1123 )
1124 );
1125
1126 // Restart language servers
1127 project.update(cx, |project, cx| {
1128 project.restart_language_servers_for_buffers(
1129 vec![rust_buffer.clone(), json_buffer.clone()],
1130 HashSet::default(),
1131 cx,
1132 );
1133 });
1134
1135 let mut rust_shutdown_requests = fake_rust_server
1136 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1137 let mut json_shutdown_requests = fake_json_server
1138 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1139 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1140
1141 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1142 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1143
1144 // Ensure rust document is reopened in new rust language server
1145 assert_eq!(
1146 fake_rust_server
1147 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1148 .await
1149 .text_document,
1150 lsp::TextDocumentItem {
1151 uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
1152 version: 0,
1153 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1154 language_id: "rust".to_string(),
1155 }
1156 );
1157
1158 // Ensure json documents are reopened in new json language server
1159 assert_set_eq!(
1160 [
1161 fake_json_server
1162 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1163 .await
1164 .text_document,
1165 fake_json_server
1166 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1167 .await
1168 .text_document,
1169 ],
1170 [
1171 lsp::TextDocumentItem {
1172 uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
1173 version: 0,
1174 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1175 language_id: "json".to_string(),
1176 },
1177 lsp::TextDocumentItem {
1178 uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
1179 version: 0,
1180 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1181 language_id: "json".to_string(),
1182 }
1183 ]
1184 );
1185
1186 // Close notifications are reported only to servers matching the buffer's language.
1187 cx.update(|_| drop(_json_handle));
1188 let close_message = lsp::DidCloseTextDocumentParams {
1189 text_document: lsp::TextDocumentIdentifier::new(
1190 lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
1191 ),
1192 };
1193 assert_eq!(
1194 fake_json_server
1195 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1196 .await,
1197 close_message,
1198 );
1199}
1200
1201#[gpui::test]
1202async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1203 init_test(cx);
1204
1205 let fs = FakeFs::new(cx.executor());
1206 fs.insert_tree(
1207 path!("/the-root"),
1208 json!({
1209 ".gitignore": "target\n",
1210 "Cargo.lock": "",
1211 "src": {
1212 "a.rs": "",
1213 "b.rs": "",
1214 },
1215 "target": {
1216 "x": {
1217 "out": {
1218 "x.rs": ""
1219 }
1220 },
1221 "y": {
1222 "out": {
1223 "y.rs": "",
1224 }
1225 },
1226 "z": {
1227 "out": {
1228 "z.rs": ""
1229 }
1230 }
1231 }
1232 }),
1233 )
1234 .await;
1235 fs.insert_tree(
1236 path!("/the-registry"),
1237 json!({
1238 "dep1": {
1239 "src": {
1240 "dep1.rs": "",
1241 }
1242 },
1243 "dep2": {
1244 "src": {
1245 "dep2.rs": "",
1246 }
1247 },
1248 }),
1249 )
1250 .await;
1251 fs.insert_tree(
1252 path!("/the/stdlib"),
1253 json!({
1254 "LICENSE": "",
1255 "src": {
1256 "string.rs": "",
1257 }
1258 }),
1259 )
1260 .await;
1261
1262 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1263 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1264 (project.languages().clone(), project.lsp_store())
1265 });
1266 language_registry.add(rust_lang());
1267 let mut fake_servers = language_registry.register_fake_lsp(
1268 "Rust",
1269 FakeLspAdapter {
1270 name: "the-language-server",
1271 ..Default::default()
1272 },
1273 );
1274
1275 cx.executor().run_until_parked();
1276
1277 // Start the language server by opening a buffer with a compatible file extension.
1278 project
1279 .update(cx, |project, cx| {
1280 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284
1285 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1286 project.update(cx, |project, cx| {
1287 let worktree = project.worktrees(cx).next().unwrap();
1288 assert_eq!(
1289 worktree
1290 .read(cx)
1291 .snapshot()
1292 .entries(true, 0)
1293 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1294 .collect::<Vec<_>>(),
1295 &[
1296 (Path::new(""), false),
1297 (Path::new(".gitignore"), false),
1298 (Path::new("Cargo.lock"), false),
1299 (Path::new("src"), false),
1300 (Path::new("src/a.rs"), false),
1301 (Path::new("src/b.rs"), false),
1302 (Path::new("target"), true),
1303 ]
1304 );
1305 });
1306
1307 let prev_read_dir_count = fs.read_dir_call_count();
1308
1309 let fake_server = fake_servers.next().await.unwrap();
1310 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1311 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1312 id
1313 });
1314
1315 // Simulate jumping to a definition in a dependency outside of the worktree.
1316 let _out_of_worktree_buffer = project
1317 .update(cx, |project, cx| {
1318 project.open_local_buffer_via_lsp(
1319 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1320 server_id,
1321 cx,
1322 )
1323 })
1324 .await
1325 .unwrap();
1326
1327 // Keep track of the FS events reported to the language server.
1328 let file_changes = Arc::new(Mutex::new(Vec::new()));
1329 fake_server
1330 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1331 registrations: vec![lsp::Registration {
1332 id: Default::default(),
1333 method: "workspace/didChangeWatchedFiles".to_string(),
1334 register_options: serde_json::to_value(
1335 lsp::DidChangeWatchedFilesRegistrationOptions {
1336 watchers: vec![
1337 lsp::FileSystemWatcher {
1338 glob_pattern: lsp::GlobPattern::String(
1339 path!("/the-root/Cargo.toml").to_string(),
1340 ),
1341 kind: None,
1342 },
1343 lsp::FileSystemWatcher {
1344 glob_pattern: lsp::GlobPattern::String(
1345 path!("/the-root/src/*.{rs,c}").to_string(),
1346 ),
1347 kind: None,
1348 },
1349 lsp::FileSystemWatcher {
1350 glob_pattern: lsp::GlobPattern::String(
1351 path!("/the-root/target/y/**/*.rs").to_string(),
1352 ),
1353 kind: None,
1354 },
1355 lsp::FileSystemWatcher {
1356 glob_pattern: lsp::GlobPattern::String(
1357 path!("/the/stdlib/src/**/*.rs").to_string(),
1358 ),
1359 kind: None,
1360 },
1361 lsp::FileSystemWatcher {
1362 glob_pattern: lsp::GlobPattern::String(
1363 path!("**/Cargo.lock").to_string(),
1364 ),
1365 kind: None,
1366 },
1367 ],
1368 },
1369 )
1370 .ok(),
1371 }],
1372 })
1373 .await
1374 .into_response()
1375 .unwrap();
1376 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1377 let file_changes = file_changes.clone();
1378 move |params, _| {
1379 let mut file_changes = file_changes.lock();
1380 file_changes.extend(params.changes);
1381 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1382 }
1383 });
1384
1385 cx.executor().run_until_parked();
1386 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1387 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1388
1389 let mut new_watched_paths = fs.watched_paths();
1390 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1391 assert_eq!(
1392 &new_watched_paths,
1393 &[
1394 Path::new(path!("/the-root")),
1395 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1396 Path::new(path!("/the/stdlib/src"))
1397 ]
1398 );
1399
1400 // Now the language server has asked us to watch an ignored directory path,
1401 // so we recursively load it.
1402 project.update(cx, |project, cx| {
1403 let worktree = project.visible_worktrees(cx).next().unwrap();
1404 assert_eq!(
1405 worktree
1406 .read(cx)
1407 .snapshot()
1408 .entries(true, 0)
1409 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1410 .collect::<Vec<_>>(),
1411 &[
1412 (Path::new(""), false),
1413 (Path::new(".gitignore"), false),
1414 (Path::new("Cargo.lock"), false),
1415 (Path::new("src"), false),
1416 (Path::new("src/a.rs"), false),
1417 (Path::new("src/b.rs"), false),
1418 (Path::new("target"), true),
1419 (Path::new("target/x"), true),
1420 (Path::new("target/y"), true),
1421 (Path::new("target/y/out"), true),
1422 (Path::new("target/y/out/y.rs"), true),
1423 (Path::new("target/z"), true),
1424 ]
1425 );
1426 });
1427
1428 // Perform some file system mutations, two of which match the watched patterns,
1429 // and one of which does not.
1430 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1431 .await
1432 .unwrap();
1433 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1434 .await
1435 .unwrap();
1436 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1437 .await
1438 .unwrap();
1439 fs.create_file(
1440 path!("/the-root/target/x/out/x2.rs").as_ref(),
1441 Default::default(),
1442 )
1443 .await
1444 .unwrap();
1445 fs.create_file(
1446 path!("/the-root/target/y/out/y2.rs").as_ref(),
1447 Default::default(),
1448 )
1449 .await
1450 .unwrap();
1451 fs.save(
1452 path!("/the-root/Cargo.lock").as_ref(),
1453 &"".into(),
1454 Default::default(),
1455 )
1456 .await
1457 .unwrap();
1458 fs.save(
1459 path!("/the-stdlib/LICENSE").as_ref(),
1460 &"".into(),
1461 Default::default(),
1462 )
1463 .await
1464 .unwrap();
1465 fs.save(
1466 path!("/the/stdlib/src/string.rs").as_ref(),
1467 &"".into(),
1468 Default::default(),
1469 )
1470 .await
1471 .unwrap();
1472
1473 // The language server receives events for the FS mutations that match its watch patterns.
1474 cx.executor().run_until_parked();
1475 assert_eq!(
1476 &*file_changes.lock(),
1477 &[
1478 lsp::FileEvent {
1479 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1480 typ: lsp::FileChangeType::CHANGED,
1481 },
1482 lsp::FileEvent {
1483 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1484 typ: lsp::FileChangeType::DELETED,
1485 },
1486 lsp::FileEvent {
1487 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1488 typ: lsp::FileChangeType::CREATED,
1489 },
1490 lsp::FileEvent {
1491 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1492 typ: lsp::FileChangeType::CREATED,
1493 },
1494 lsp::FileEvent {
1495 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1496 typ: lsp::FileChangeType::CHANGED,
1497 },
1498 ]
1499 );
1500}
1501
1502#[gpui::test]
1503async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1504 init_test(cx);
1505
1506 let fs = FakeFs::new(cx.executor());
1507 fs.insert_tree(
1508 path!("/dir"),
1509 json!({
1510 "a.rs": "let a = 1;",
1511 "b.rs": "let b = 2;"
1512 }),
1513 )
1514 .await;
1515
1516 let project = Project::test(
1517 fs,
1518 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1519 cx,
1520 )
1521 .await;
1522 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1523
1524 let buffer_a = project
1525 .update(cx, |project, cx| {
1526 project.open_local_buffer(path!("/dir/a.rs"), cx)
1527 })
1528 .await
1529 .unwrap();
1530 let buffer_b = project
1531 .update(cx, |project, cx| {
1532 project.open_local_buffer(path!("/dir/b.rs"), cx)
1533 })
1534 .await
1535 .unwrap();
1536
1537 lsp_store.update(cx, |lsp_store, cx| {
1538 lsp_store
1539 .update_diagnostics(
1540 LanguageServerId(0),
1541 lsp::PublishDiagnosticsParams {
1542 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1543 version: None,
1544 diagnostics: vec![lsp::Diagnostic {
1545 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1546 severity: Some(lsp::DiagnosticSeverity::ERROR),
1547 message: "error 1".to_string(),
1548 ..Default::default()
1549 }],
1550 },
1551 None,
1552 DiagnosticSourceKind::Pushed,
1553 &[],
1554 cx,
1555 )
1556 .unwrap();
1557 lsp_store
1558 .update_diagnostics(
1559 LanguageServerId(0),
1560 lsp::PublishDiagnosticsParams {
1561 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1562 version: None,
1563 diagnostics: vec![lsp::Diagnostic {
1564 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1565 severity: Some(DiagnosticSeverity::WARNING),
1566 message: "error 2".to_string(),
1567 ..Default::default()
1568 }],
1569 },
1570 None,
1571 DiagnosticSourceKind::Pushed,
1572 &[],
1573 cx,
1574 )
1575 .unwrap();
1576 });
1577
1578 buffer_a.update(cx, |buffer, _| {
1579 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1580 assert_eq!(
1581 chunks
1582 .iter()
1583 .map(|(s, d)| (s.as_str(), *d))
1584 .collect::<Vec<_>>(),
1585 &[
1586 ("let ", None),
1587 ("a", Some(DiagnosticSeverity::ERROR)),
1588 (" = 1;", None),
1589 ]
1590 );
1591 });
1592 buffer_b.update(cx, |buffer, _| {
1593 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1594 assert_eq!(
1595 chunks
1596 .iter()
1597 .map(|(s, d)| (s.as_str(), *d))
1598 .collect::<Vec<_>>(),
1599 &[
1600 ("let ", None),
1601 ("b", Some(DiagnosticSeverity::WARNING)),
1602 (" = 2;", None),
1603 ]
1604 );
1605 });
1606}
1607
1608#[gpui::test]
1609async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1610 init_test(cx);
1611
1612 let fs = FakeFs::new(cx.executor());
1613 fs.insert_tree(
1614 path!("/root"),
1615 json!({
1616 "dir": {
1617 ".git": {
1618 "HEAD": "ref: refs/heads/main",
1619 },
1620 ".gitignore": "b.rs",
1621 "a.rs": "let a = 1;",
1622 "b.rs": "let b = 2;",
1623 },
1624 "other.rs": "let b = c;"
1625 }),
1626 )
1627 .await;
1628
1629 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1630 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1631 let (worktree, _) = project
1632 .update(cx, |project, cx| {
1633 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1634 })
1635 .await
1636 .unwrap();
1637 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1638
1639 let (worktree, _) = project
1640 .update(cx, |project, cx| {
1641 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1642 })
1643 .await
1644 .unwrap();
1645 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1646
1647 let server_id = LanguageServerId(0);
1648 lsp_store.update(cx, |lsp_store, cx| {
1649 lsp_store
1650 .update_diagnostics(
1651 server_id,
1652 lsp::PublishDiagnosticsParams {
1653 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1654 version: None,
1655 diagnostics: vec![lsp::Diagnostic {
1656 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1657 severity: Some(lsp::DiagnosticSeverity::ERROR),
1658 message: "unused variable 'b'".to_string(),
1659 ..Default::default()
1660 }],
1661 },
1662 None,
1663 DiagnosticSourceKind::Pushed,
1664 &[],
1665 cx,
1666 )
1667 .unwrap();
1668 lsp_store
1669 .update_diagnostics(
1670 server_id,
1671 lsp::PublishDiagnosticsParams {
1672 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1673 version: None,
1674 diagnostics: vec![lsp::Diagnostic {
1675 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1676 severity: Some(lsp::DiagnosticSeverity::ERROR),
1677 message: "unknown variable 'c'".to_string(),
1678 ..Default::default()
1679 }],
1680 },
1681 None,
1682 DiagnosticSourceKind::Pushed,
1683 &[],
1684 cx,
1685 )
1686 .unwrap();
1687 });
1688
1689 let main_ignored_buffer = project
1690 .update(cx, |project, cx| {
1691 project.open_buffer((main_worktree_id, "b.rs"), cx)
1692 })
1693 .await
1694 .unwrap();
1695 main_ignored_buffer.update(cx, |buffer, _| {
1696 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1697 assert_eq!(
1698 chunks
1699 .iter()
1700 .map(|(s, d)| (s.as_str(), *d))
1701 .collect::<Vec<_>>(),
1702 &[
1703 ("let ", None),
1704 ("b", Some(DiagnosticSeverity::ERROR)),
1705 (" = 2;", None),
1706 ],
1707 "Gigitnored buffers should still get in-buffer diagnostics",
1708 );
1709 });
1710 let other_buffer = project
1711 .update(cx, |project, cx| {
1712 project.open_buffer((other_worktree_id, ""), cx)
1713 })
1714 .await
1715 .unwrap();
1716 other_buffer.update(cx, |buffer, _| {
1717 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1718 assert_eq!(
1719 chunks
1720 .iter()
1721 .map(|(s, d)| (s.as_str(), *d))
1722 .collect::<Vec<_>>(),
1723 &[
1724 ("let b = ", None),
1725 ("c", Some(DiagnosticSeverity::ERROR)),
1726 (";", None),
1727 ],
1728 "Buffers from hidden projects should still get in-buffer diagnostics"
1729 );
1730 });
1731
1732 project.update(cx, |project, cx| {
1733 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1734 assert_eq!(
1735 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1736 vec![(
1737 ProjectPath {
1738 worktree_id: main_worktree_id,
1739 path: Arc::from(Path::new("b.rs")),
1740 },
1741 server_id,
1742 DiagnosticSummary {
1743 error_count: 1,
1744 warning_count: 0,
1745 }
1746 )]
1747 );
1748 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1749 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1750 });
1751}
1752
// Verifies the project event sequence emitted around a disk-based diagnostics
// pass: started/finished events keyed off the server's progress token, a
// DiagnosticsUpdated event per publish, and deduplication of consecutive
// empty publishes.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress that starts with the configured token is interpreted as a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-pass emits a DiagnosticsUpdated event for the
    // affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // The second identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1889
// Verifies that restarting a language server mid-diagnostics-pass does not
// leave the project stuck in a "diagnostics running" state: the old server's
// unfinished progress is discarded, and the replacement server's progress
// lifecycle is tracked from scratch.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    // The replacement server gets a fresh id (1) for the same worktree.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1990
// Verifies that diagnostics published by a language server are cleared (from
// both the buffer and the project summary) when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2071
2072#[gpui::test]
2073async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2074 init_test(cx);
2075
2076 let fs = FakeFs::new(cx.executor());
2077 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2078
2079 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2080 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2081
2082 language_registry.add(rust_lang());
2083 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2084
2085 let (buffer, _handle) = project
2086 .update(cx, |project, cx| {
2087 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2088 })
2089 .await
2090 .unwrap();
2091
2092 // Before restarting the server, report diagnostics with an unknown buffer version.
2093 let fake_server = fake_servers.next().await.unwrap();
2094 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2095 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2096 version: Some(10000),
2097 diagnostics: Vec::new(),
2098 });
2099 cx.executor().run_until_parked();
2100 project.update(cx, |project, cx| {
2101 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2102 });
2103
2104 let mut fake_server = fake_servers.next().await.unwrap();
2105 let notification = fake_server
2106 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2107 .await
2108 .text_document;
2109 assert_eq!(notification.version, 0);
2110}
2111
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for tokens whose progress was begun with
// `cancellable: Some(true)`.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // This token is explicitly non-cancellable, so it must NOT receive a
    // cancel notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // This token is cancellable and is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token's work is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2176
// Verifies that toggling the per-language `enable_language_server` setting
// stops and starts only the affected language's server, leaving other
// languages' servers untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server exits; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance reopens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2294
// Verifies that pushed LSP diagnostics stay anchored to the right text as the
// buffer is edited: diagnostics published against an older document version
// are translated through the edits made since, overlapping diagnostics from
// one publish are both surfaced, and group ids increase across publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostic source so entries with
    // that source get `is_disk_based: true` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    // (positions are in the *open* version's coordinates; the buffer content
    // has since shifted down two lines).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends mid-diagnostic clips the highlighted text.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both overlapping entries are returned; the wider warning sorts first.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins the
        // chunk highlight; the warning covers the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2586
// Verifies how zero-width diagnostic ranges are rendered as chunks: an empty
// range is expanded forward to cover the following character, or backward to
// the preceding character when it falls at the end of a line.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store: one
    // mid-line (before the `;`) and one at the end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2662
// Verifies that diagnostics reported by two different language servers for
// the same file are tracked independently and both counted in the project's
// diagnostic summary (one error from each server => error_count == 2).
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path and same range, but attributed to server 0...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and to server 1: the second update must not replace the first.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2723
// Verifies that `edits_from_lsp` correctly interprets edits computed against
// a *past* document version: the buffer is edited after the server snapshots
// it, and the LSP edits (expressed in old coordinates) must be translated
// through those newer local edits before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server "computed its edits" against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits below use coordinates of `lsp_document_version`, i.e. the
    // buffer as it was *before* the local edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local edits while
    // landing the server's changes in the right (shifted) places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2878
// Verifies that `edits_from_lsp` minimizes a "replace almost everything"
// style diff (as rust-analyzer emits for merge-imports) down to the small
// logical edits actually needed, instead of rewriting the whole file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits collapse to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2989
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair from a
// server: an insertion at the same position *after* a replacement that starts
// there. The insertion must still land before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3045
// Verifies that `edits_from_lsp` is robust to malformed server edits:
// unordered edits, an inverted range (end before start), and a range whose
// end points past the final line. The result must still collapse to the same
// two minimal edits as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds end position (line 99 doesn't exist).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3152
3153fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3154 buffer: &Buffer,
3155 range: Range<T>,
3156) -> Vec<(String, Option<DiagnosticSeverity>)> {
3157 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3158 for chunk in buffer.snapshot().chunks(range, true) {
3159 if chunks
3160 .last()
3161 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3162 {
3163 chunks.last_mut().unwrap().0.push_str(chunk.text);
3164 } else {
3165 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3166 }
3167 }
3168 chunks
3169}
3170
// Verifies go-to-definition across files: resolving a definition in a file
// outside the project adds an *invisible* worktree for it, no extra language
// server is started, and dropping the definition releases that worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // The fake server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: all worktree root paths paired with their visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3269
// Verifies that when a completion item carries a `text_edit`, its range and
// new text take precedence over both `insert_text` and `label` when the
// completion is resolved into a buffer edit.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The text_edit replaces the final "fqn" (last three characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3352
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A fake file system containing a single empty TypeScript file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the completion request first; the handler installed below
        // resolves it. `.next().await` on the handler stream waits until the
        // request has actually been served.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // List-level default edit range covering the last three
                    // characters of the line ("fqn").
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used as the new text, and the list-level default
        // edit_range supplies the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label becomes the new
        // text; the default edit_range still supplies the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3488
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A fake file system containing a single empty TypeScript file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler installed below serves it, and
    // `.next().await` waits until it has been served.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // Without any edit range from the server, the replaced span is inferred
    // from the word adjacent to the cursor ("fqn", 3 characters).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the new text, and the inferred range covers the
    // word before the cursor ("cmp"), excluding the closing quote.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3594
3595#[gpui::test]
3596async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3597 init_test(cx);
3598
3599 let fs = FakeFs::new(cx.executor());
3600 fs.insert_tree(
3601 path!("/dir"),
3602 json!({
3603 "a.ts": "",
3604 }),
3605 )
3606 .await;
3607
3608 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3609
3610 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3611 language_registry.add(typescript_lang());
3612 let mut fake_language_servers = language_registry.register_fake_lsp(
3613 "TypeScript",
3614 FakeLspAdapter {
3615 capabilities: lsp::ServerCapabilities {
3616 completion_provider: Some(lsp::CompletionOptions {
3617 trigger_characters: Some(vec![":".to_string()]),
3618 ..Default::default()
3619 }),
3620 ..Default::default()
3621 },
3622 ..Default::default()
3623 },
3624 );
3625
3626 let (buffer, _handle) = project
3627 .update(cx, |p, cx| {
3628 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3629 })
3630 .await
3631 .unwrap();
3632
3633 let fake_server = fake_language_servers.next().await.unwrap();
3634
3635 let text = "let a = b.fqn";
3636 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3637 let completions = project.update(cx, |project, cx| {
3638 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3639 });
3640
3641 fake_server
3642 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3643 Ok(Some(lsp::CompletionResponse::Array(vec![
3644 lsp::CompletionItem {
3645 label: "fullyQualifiedName?".into(),
3646 insert_text: Some("fully\rQualified\r\nName".into()),
3647 ..Default::default()
3648 },
3649 ])))
3650 })
3651 .next()
3652 .await;
3653 let completions = completions
3654 .await
3655 .unwrap()
3656 .into_iter()
3657 .flat_map(|response| response.completions)
3658 .collect::<Vec<_>>();
3659 assert_eq!(completions.len(), 1);
3660 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3661}
3662
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A fake file system with one TypeScript file containing "a".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises code-action resolution and a single
    // executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the editor: insert "X"
                    // at the start of the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3804
3805#[gpui::test(iterations = 10)]
3806async fn test_save_file(cx: &mut gpui::TestAppContext) {
3807 init_test(cx);
3808
3809 let fs = FakeFs::new(cx.executor());
3810 fs.insert_tree(
3811 path!("/dir"),
3812 json!({
3813 "file1": "the old contents",
3814 }),
3815 )
3816 .await;
3817
3818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3819 let buffer = project
3820 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3821 .await
3822 .unwrap();
3823 buffer.update(cx, |buffer, cx| {
3824 assert_eq!(buffer.text(), "the old contents");
3825 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3826 });
3827
3828 project
3829 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3830 .await
3831 .unwrap();
3832
3833 let new_text = fs
3834 .load(Path::new(path!("/dir/file1")))
3835 .await
3836 .unwrap()
3837 .replace("\r\n", "\n");
3838 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3839}
3840
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Start with an empty directory: the buffer is created in memory first
    // and only acquires a path (and thus a language) on save.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    // Registering an untitled buffer must not start any server yet: with no
    // file path there is no language to match against.
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` name, giving it a Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly-spawned server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3920
3921#[gpui::test(iterations = 30)]
3922async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3923 init_test(cx);
3924
3925 let fs = FakeFs::new(cx.executor());
3926 fs.insert_tree(
3927 path!("/dir"),
3928 json!({
3929 "file1": "the original contents",
3930 }),
3931 )
3932 .await;
3933
3934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3935 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3936 let buffer = project
3937 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3938 .await
3939 .unwrap();
3940
3941 // Simulate buffer diffs being slow, so that they don't complete before
3942 // the next file change occurs.
3943 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3944
3945 // Change the buffer's file on disk, and then wait for the file change
3946 // to be detected by the worktree, so that the buffer starts reloading.
3947 fs.save(
3948 path!("/dir/file1").as_ref(),
3949 &"the first contents".into(),
3950 Default::default(),
3951 )
3952 .await
3953 .unwrap();
3954 worktree.next_event(cx).await;
3955
3956 // Change the buffer's file again. Depending on the random seed, the
3957 // previous file change may still be in progress.
3958 fs.save(
3959 path!("/dir/file1").as_ref(),
3960 &"the second contents".into(),
3961 Default::default(),
3962 )
3963 .await
3964 .unwrap();
3965 worktree.next_event(cx).await;
3966
3967 cx.executor().run_until_parked();
3968 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3969 buffer.read_with(cx, |buffer, _| {
3970 assert_eq!(buffer.text(), on_disk_text);
3971 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3972 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3973 });
3974}
3975
3976#[gpui::test(iterations = 30)]
3977async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3978 init_test(cx);
3979
3980 let fs = FakeFs::new(cx.executor());
3981 fs.insert_tree(
3982 path!("/dir"),
3983 json!({
3984 "file1": "the original contents",
3985 }),
3986 )
3987 .await;
3988
3989 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3990 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3991 let buffer = project
3992 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3993 .await
3994 .unwrap();
3995
3996 // Simulate buffer diffs being slow, so that they don't complete before
3997 // the next file change occurs.
3998 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3999
4000 // Change the buffer's file on disk, and then wait for the file change
4001 // to be detected by the worktree, so that the buffer starts reloading.
4002 fs.save(
4003 path!("/dir/file1").as_ref(),
4004 &"the first contents".into(),
4005 Default::default(),
4006 )
4007 .await
4008 .unwrap();
4009 worktree.next_event(cx).await;
4010
4011 cx.executor()
4012 .spawn(cx.executor().simulate_random_delay())
4013 .await;
4014
4015 // Perform a noop edit, causing the buffer's version to increase.
4016 buffer.update(cx, |buffer, cx| {
4017 buffer.edit([(0..0, " ")], None, cx);
4018 buffer.undo(cx);
4019 });
4020
4021 cx.executor().run_until_parked();
4022 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4023 buffer.read_with(cx, |buffer, _| {
4024 let buffer_text = buffer.text();
4025 if buffer_text == on_disk_text {
4026 assert!(
4027 !buffer.is_dirty() && !buffer.has_conflict(),
4028 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4029 );
4030 }
4031 // If the file change occurred while the buffer was processing the first
4032 // change, the buffer will be in a conflicting state.
4033 else {
4034 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4035 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4036 }
4037 });
4038}
4039
4040#[gpui::test]
4041async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4042 init_test(cx);
4043
4044 let fs = FakeFs::new(cx.executor());
4045 fs.insert_tree(
4046 path!("/dir"),
4047 json!({
4048 "file1": "the old contents",
4049 }),
4050 )
4051 .await;
4052
4053 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4054 let buffer = project
4055 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4056 .await
4057 .unwrap();
4058 buffer.update(cx, |buffer, cx| {
4059 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4060 });
4061
4062 project
4063 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4064 .await
4065 .unwrap();
4066
4067 let new_text = fs
4068 .load(Path::new(path!("/dir/file1")))
4069 .await
4070 .unwrap()
4071 .replace("\r\n", "\n");
4072 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4073}
4074
4075#[gpui::test]
4076async fn test_save_as(cx: &mut gpui::TestAppContext) {
4077 init_test(cx);
4078
4079 let fs = FakeFs::new(cx.executor());
4080 fs.insert_tree("/dir", json!({})).await;
4081
4082 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4083
4084 let languages = project.update(cx, |project, _| project.languages().clone());
4085 languages.add(rust_lang());
4086
4087 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
4088 buffer.update(cx, |buffer, cx| {
4089 buffer.edit([(0..0, "abc")], None, cx);
4090 assert!(buffer.is_dirty());
4091 assert!(!buffer.has_conflict());
4092 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4093 });
4094 project
4095 .update(cx, |project, cx| {
4096 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4097 let path = ProjectPath {
4098 worktree_id,
4099 path: Arc::from(Path::new("file1.rs")),
4100 };
4101 project.save_buffer_as(buffer.clone(), path, cx)
4102 })
4103 .await
4104 .unwrap();
4105 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4106
4107 cx.executor().run_until_parked();
4108 buffer.update(cx, |buffer, cx| {
4109 assert_eq!(
4110 buffer.file().unwrap().full_path(cx),
4111 Path::new("dir/file1.rs")
4112 );
4113 assert!(!buffer.is_dirty());
4114 assert!(!buffer.has_conflict());
4115 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4116 });
4117
4118 let opened_buffer = project
4119 .update(cx, |project, cx| {
4120 project.open_local_buffer("/dir/file1.rs", cx)
4121 })
4122 .await
4123 .unwrap();
4124 assert_eq!(opened_buffer, buffer);
4125}
4126
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // This test uses the real file system (TempTree + RealFs) so that actual
    // rename/remove events flow through the worktree scanner.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree broadcasts so it can be
    // replayed into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames/removals.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including a rename of an ancestor dir).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4292
4293#[gpui::test(iterations = 10)]
4294async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4295 init_test(cx);
4296
4297 let fs = FakeFs::new(cx.executor());
4298 fs.insert_tree(
4299 path!("/dir"),
4300 json!({
4301 "a": {
4302 "file1": "",
4303 }
4304 }),
4305 )
4306 .await;
4307
4308 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4309 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4310 let tree_id = tree.update(cx, |tree, _| tree.id());
4311
4312 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4313 project.update(cx, |project, cx| {
4314 let tree = project.worktrees(cx).next().unwrap();
4315 tree.read(cx)
4316 .entry_for_path(path)
4317 .unwrap_or_else(|| panic!("no entry for path {}", path))
4318 .id
4319 })
4320 };
4321
4322 let dir_id = id_for_path("a", cx);
4323 let file_id = id_for_path("a/file1", cx);
4324 let buffer = project
4325 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4326 .await
4327 .unwrap();
4328 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4329
4330 project
4331 .update(cx, |project, cx| {
4332 project.rename_entry(dir_id, Path::new("b"), cx)
4333 })
4334 .unwrap()
4335 .await
4336 .into_included()
4337 .unwrap();
4338 cx.executor().run_until_parked();
4339
4340 assert_eq!(id_for_path("b", cx), dir_id);
4341 assert_eq!(id_for_path("b/file1", cx), file_id);
4342 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4343}
4344
4345#[gpui::test]
4346async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4347 init_test(cx);
4348
4349 let fs = FakeFs::new(cx.executor());
4350 fs.insert_tree(
4351 "/dir",
4352 json!({
4353 "a.txt": "a-contents",
4354 "b.txt": "b-contents",
4355 }),
4356 )
4357 .await;
4358
4359 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4360
4361 // Spawn multiple tasks to open paths, repeating some paths.
4362 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4363 (
4364 p.open_local_buffer("/dir/a.txt", cx),
4365 p.open_local_buffer("/dir/b.txt", cx),
4366 p.open_local_buffer("/dir/a.txt", cx),
4367 )
4368 });
4369
4370 let buffer_a_1 = buffer_a_1.await.unwrap();
4371 let buffer_a_2 = buffer_a_2.await.unwrap();
4372 let buffer_b = buffer_b.await.unwrap();
4373 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4374 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4375
4376 // There is only one buffer per path.
4377 let buffer_a_id = buffer_a_1.entity_id();
4378 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4379
4380 // Open the same path again while it is still open.
4381 drop(buffer_a_1);
4382 let buffer_a_3 = project
4383 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4384 .await
4385 .unwrap();
4386
4387 // There's still only one buffer per path.
4388 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4389}
4390
4391#[gpui::test]
4392async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4393 init_test(cx);
4394
4395 let fs = FakeFs::new(cx.executor());
4396 fs.insert_tree(
4397 path!("/dir"),
4398 json!({
4399 "file1": "abc",
4400 "file2": "def",
4401 "file3": "ghi",
4402 }),
4403 )
4404 .await;
4405
4406 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4407
4408 let buffer1 = project
4409 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4410 .await
4411 .unwrap();
4412 let events = Arc::new(Mutex::new(Vec::new()));
4413
4414 // initially, the buffer isn't dirty.
4415 buffer1.update(cx, |buffer, cx| {
4416 cx.subscribe(&buffer1, {
4417 let events = events.clone();
4418 move |_, _, event, _| match event {
4419 BufferEvent::Operation { .. } => {}
4420 _ => events.lock().push(event.clone()),
4421 }
4422 })
4423 .detach();
4424
4425 assert!(!buffer.is_dirty());
4426 assert!(events.lock().is_empty());
4427
4428 buffer.edit([(1..2, "")], None, cx);
4429 });
4430
4431 // after the first edit, the buffer is dirty, and emits a dirtied event.
4432 buffer1.update(cx, |buffer, cx| {
4433 assert!(buffer.text() == "ac");
4434 assert!(buffer.is_dirty());
4435 assert_eq!(
4436 *events.lock(),
4437 &[
4438 language::BufferEvent::Edited,
4439 language::BufferEvent::DirtyChanged
4440 ]
4441 );
4442 events.lock().clear();
4443 buffer.did_save(
4444 buffer.version(),
4445 buffer.file().unwrap().disk_state().mtime(),
4446 cx,
4447 );
4448 });
4449
4450 // after saving, the buffer is not dirty, and emits a saved event.
4451 buffer1.update(cx, |buffer, cx| {
4452 assert!(!buffer.is_dirty());
4453 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4454 events.lock().clear();
4455
4456 buffer.edit([(1..1, "B")], None, cx);
4457 buffer.edit([(2..2, "D")], None, cx);
4458 });
4459
4460 // after editing again, the buffer is dirty, and emits another dirty event.
4461 buffer1.update(cx, |buffer, cx| {
4462 assert!(buffer.text() == "aBDc");
4463 assert!(buffer.is_dirty());
4464 assert_eq!(
4465 *events.lock(),
4466 &[
4467 language::BufferEvent::Edited,
4468 language::BufferEvent::DirtyChanged,
4469 language::BufferEvent::Edited,
4470 ],
4471 );
4472 events.lock().clear();
4473
4474 // After restoring the buffer to its previously-saved state,
4475 // the buffer is not considered dirty anymore.
4476 buffer.edit([(1..3, "")], None, cx);
4477 assert!(buffer.text() == "ac");
4478 assert!(!buffer.is_dirty());
4479 });
4480
4481 assert_eq!(
4482 *events.lock(),
4483 &[
4484 language::BufferEvent::Edited,
4485 language::BufferEvent::DirtyChanged
4486 ]
4487 );
4488
4489 // When a file is deleted, it is not considered dirty.
4490 let events = Arc::new(Mutex::new(Vec::new()));
4491 let buffer2 = project
4492 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4493 .await
4494 .unwrap();
4495 buffer2.update(cx, |_, cx| {
4496 cx.subscribe(&buffer2, {
4497 let events = events.clone();
4498 move |_, _, event, _| match event {
4499 BufferEvent::Operation { .. } => {}
4500 _ => events.lock().push(event.clone()),
4501 }
4502 })
4503 .detach();
4504 });
4505
4506 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4507 .await
4508 .unwrap();
4509 cx.executor().run_until_parked();
4510 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4511 assert_eq!(
4512 mem::take(&mut *events.lock()),
4513 &[language::BufferEvent::FileHandleChanged]
4514 );
4515
4516 // Buffer becomes dirty when edited.
4517 buffer2.update(cx, |buffer, cx| {
4518 buffer.edit([(2..3, "")], None, cx);
4519 assert_eq!(buffer.is_dirty(), true);
4520 });
4521 assert_eq!(
4522 mem::take(&mut *events.lock()),
4523 &[
4524 language::BufferEvent::Edited,
4525 language::BufferEvent::DirtyChanged
4526 ]
4527 );
4528
4529 // Buffer becomes clean again when all of its content is removed, because
4530 // the file was deleted.
4531 buffer2.update(cx, |buffer, cx| {
4532 buffer.edit([(0..2, "")], None, cx);
4533 assert_eq!(buffer.is_empty(), true);
4534 assert_eq!(buffer.is_dirty(), false);
4535 });
4536 assert_eq!(
4537 *events.lock(),
4538 &[
4539 language::BufferEvent::Edited,
4540 language::BufferEvent::DirtyChanged
4541 ]
4542 );
4543
4544 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4545 let events = Arc::new(Mutex::new(Vec::new()));
4546 let buffer3 = project
4547 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4548 .await
4549 .unwrap();
4550 buffer3.update(cx, |_, cx| {
4551 cx.subscribe(&buffer3, {
4552 let events = events.clone();
4553 move |_, _, event, _| match event {
4554 BufferEvent::Operation { .. } => {}
4555 _ => events.lock().push(event.clone()),
4556 }
4557 })
4558 .detach();
4559 });
4560
4561 buffer3.update(cx, |buffer, cx| {
4562 buffer.edit([(0..0, "x")], None, cx);
4563 });
4564 events.lock().clear();
4565 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4566 .await
4567 .unwrap();
4568 cx.executor().run_until_parked();
4569 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4570 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4571}
4572
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer silently reloads (preserving anchors via a diff-based
    // edit), while a dirty buffer keeps its contents and is flagged as
    // conflicted instead.
    init_test(cx);

    // The ˇ markers record offsets in the initial text; anchors are created at
    // those positions so we can check they survive the on-disk reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The ˇ markers in the new text mark where the old anchors should land
    // after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors should have moved with the diff-applied edits rather than
        // being invalidated.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4655
4656#[gpui::test]
4657async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4658 init_test(cx);
4659
4660 let fs = FakeFs::new(cx.executor());
4661 fs.insert_tree(
4662 path!("/dir"),
4663 json!({
4664 "file1": "a\nb\nc\n",
4665 "file2": "one\r\ntwo\r\nthree\r\n",
4666 }),
4667 )
4668 .await;
4669
4670 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4671 let buffer1 = project
4672 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4673 .await
4674 .unwrap();
4675 let buffer2 = project
4676 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4677 .await
4678 .unwrap();
4679
4680 buffer1.update(cx, |buffer, _| {
4681 assert_eq!(buffer.text(), "a\nb\nc\n");
4682 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4683 });
4684 buffer2.update(cx, |buffer, _| {
4685 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4686 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4687 });
4688
4689 // Change a file's line endings on disk from unix to windows. The buffer's
4690 // state updates correctly.
4691 fs.save(
4692 path!("/dir/file1").as_ref(),
4693 &"aaa\nb\nc\n".into(),
4694 LineEnding::Windows,
4695 )
4696 .await
4697 .unwrap();
4698 cx.executor().run_until_parked();
4699 buffer1.update(cx, |buffer, _| {
4700 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4701 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4702 });
4703
4704 // Save a file with windows line endings. The file is written correctly.
4705 buffer2.update(cx, |buffer, cx| {
4706 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4707 });
4708 project
4709 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4710 .await
4711 .unwrap();
4712 assert_eq!(
4713 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4714 "one\r\ntwo\r\nthree\r\nfour\r\n",
4715 );
4716}
4717
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics whose messages cross-reference each
    // other via `related_information` are merged into groups: each group has
    // one primary entry plus its hint entries, shares a `group_id`, and can be
    // queried both by range and by group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical diagnostics, each accompanied by HINT diagnostics that point
    // back at the primary via related_information:
    // - "error 1" (WARNING) with one hint at the same range.
    // - "error 2" (ERROR) with two hints at a different range.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries, ordered by position. "error 2" and its hints share
    // group 0; "error 1" and its hint share group 1. Exactly one entry per
    // group is primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints, in position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4977
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-rename LSP protocol flow: when an entry is renamed,
    // the project sends `workspace/willRenameFiles` to servers that registered
    // matching file-operation filters, applies the workspace edit the server
    // returns, and afterwards sends the `workspace/didRenameFiles`
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: all *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it won't resolve until the server answers
    // willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Arbitrary edit the server returns from willRenameFiles; the project is
    // expected to resolve and apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler, proving it ran exactly once.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new file URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5106
5107#[gpui::test]
5108async fn test_rename(cx: &mut gpui::TestAppContext) {
5109 // hi
5110 init_test(cx);
5111
5112 let fs = FakeFs::new(cx.executor());
5113 fs.insert_tree(
5114 path!("/dir"),
5115 json!({
5116 "one.rs": "const ONE: usize = 1;",
5117 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
5118 }),
5119 )
5120 .await;
5121
5122 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5123
5124 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5125 language_registry.add(rust_lang());
5126 let mut fake_servers = language_registry.register_fake_lsp(
5127 "Rust",
5128 FakeLspAdapter {
5129 capabilities: lsp::ServerCapabilities {
5130 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
5131 prepare_provider: Some(true),
5132 work_done_progress_options: Default::default(),
5133 })),
5134 ..Default::default()
5135 },
5136 ..Default::default()
5137 },
5138 );
5139
5140 let (buffer, _handle) = project
5141 .update(cx, |project, cx| {
5142 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
5143 })
5144 .await
5145 .unwrap();
5146
5147 let fake_server = fake_servers.next().await.unwrap();
5148
5149 let response = project.update(cx, |project, cx| {
5150 project.prepare_rename(buffer.clone(), 7, cx)
5151 });
5152 fake_server
5153 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
5154 assert_eq!(
5155 params.text_document.uri.as_str(),
5156 uri!("file:///dir/one.rs")
5157 );
5158 assert_eq!(params.position, lsp::Position::new(0, 7));
5159 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5160 lsp::Position::new(0, 6),
5161 lsp::Position::new(0, 9),
5162 ))))
5163 })
5164 .next()
5165 .await
5166 .unwrap();
5167 let response = response.await.unwrap();
5168 let PrepareRenameResponse::Success(range) = response else {
5169 panic!("{:?}", response);
5170 };
5171 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
5172 assert_eq!(range, 6..9);
5173
5174 let response = project.update(cx, |project, cx| {
5175 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
5176 });
5177 fake_server
5178 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
5179 assert_eq!(
5180 params.text_document_position.text_document.uri.as_str(),
5181 uri!("file:///dir/one.rs")
5182 );
5183 assert_eq!(
5184 params.text_document_position.position,
5185 lsp::Position::new(0, 7)
5186 );
5187 assert_eq!(params.new_name, "THREE");
5188 Ok(Some(lsp::WorkspaceEdit {
5189 changes: Some(
5190 [
5191 (
5192 lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
5193 vec![lsp::TextEdit::new(
5194 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
5195 "THREE".to_string(),
5196 )],
5197 ),
5198 (
5199 lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
5200 vec![
5201 lsp::TextEdit::new(
5202 lsp::Range::new(
5203 lsp::Position::new(0, 24),
5204 lsp::Position::new(0, 27),
5205 ),
5206 "THREE".to_string(),
5207 ),
5208 lsp::TextEdit::new(
5209 lsp::Range::new(
5210 lsp::Position::new(0, 35),
5211 lsp::Position::new(0, 38),
5212 ),
5213 "THREE".to_string(),
5214 ),
5215 ],
5216 ),
5217 ]
5218 .into_iter()
5219 .collect(),
5220 ),
5221 ..Default::default()
5222 }))
5223 })
5224 .next()
5225 .await
5226 .unwrap();
5227 let mut transaction = response.await.unwrap().0;
5228 assert_eq!(transaction.len(), 2);
5229 assert_eq!(
5230 transaction
5231 .remove_entry(&buffer)
5232 .unwrap()
5233 .0
5234 .update(cx, |buffer, _| buffer.text()),
5235 "const THREE: usize = 1;"
5236 );
5237 assert_eq!(
5238 transaction
5239 .into_keys()
5240 .next()
5241 .unwrap()
5242 .update(cx, |buffer, _| buffer.text()),
5243 "const TWO: usize = one::THREE + one::THREE;"
5244 );
5245}
5246
5247#[gpui::test]
5248async fn test_search(cx: &mut gpui::TestAppContext) {
5249 init_test(cx);
5250
5251 let fs = FakeFs::new(cx.executor());
5252 fs.insert_tree(
5253 path!("/dir"),
5254 json!({
5255 "one.rs": "const ONE: usize = 1;",
5256 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5257 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5258 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5259 }),
5260 )
5261 .await;
5262 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5263 assert_eq!(
5264 search(
5265 &project,
5266 SearchQuery::text(
5267 "TWO",
5268 false,
5269 true,
5270 false,
5271 Default::default(),
5272 Default::default(),
5273 false,
5274 None
5275 )
5276 .unwrap(),
5277 cx
5278 )
5279 .await
5280 .unwrap(),
5281 HashMap::from_iter([
5282 (path!("dir/two.rs").to_string(), vec![6..9]),
5283 (path!("dir/three.rs").to_string(), vec![37..40])
5284 ])
5285 );
5286
5287 let buffer_4 = project
5288 .update(cx, |project, cx| {
5289 project.open_local_buffer(path!("/dir/four.rs"), cx)
5290 })
5291 .await
5292 .unwrap();
5293 buffer_4.update(cx, |buffer, cx| {
5294 let text = "two::TWO";
5295 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5296 });
5297
5298 assert_eq!(
5299 search(
5300 &project,
5301 SearchQuery::text(
5302 "TWO",
5303 false,
5304 true,
5305 false,
5306 Default::default(),
5307 Default::default(),
5308 false,
5309 None,
5310 )
5311 .unwrap(),
5312 cx
5313 )
5314 .await
5315 .unwrap(),
5316 HashMap::from_iter([
5317 (path!("dir/two.rs").to_string(), vec![6..9]),
5318 (path!("dir/three.rs").to_string(), vec![37..40]),
5319 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5320 ])
5321 );
5322}
5323
5324#[gpui::test]
5325async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5326 init_test(cx);
5327
5328 let search_query = "file";
5329
5330 let fs = FakeFs::new(cx.executor());
5331 fs.insert_tree(
5332 path!("/dir"),
5333 json!({
5334 "one.rs": r#"// Rust file one"#,
5335 "one.ts": r#"// TypeScript file one"#,
5336 "two.rs": r#"// Rust file two"#,
5337 "two.ts": r#"// TypeScript file two"#,
5338 }),
5339 )
5340 .await;
5341 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5342
5343 assert!(
5344 search(
5345 &project,
5346 SearchQuery::text(
5347 search_query,
5348 false,
5349 true,
5350 false,
5351 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5352 Default::default(),
5353 false,
5354 None
5355 )
5356 .unwrap(),
5357 cx
5358 )
5359 .await
5360 .unwrap()
5361 .is_empty(),
5362 "If no inclusions match, no files should be returned"
5363 );
5364
5365 assert_eq!(
5366 search(
5367 &project,
5368 SearchQuery::text(
5369 search_query,
5370 false,
5371 true,
5372 false,
5373 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5374 Default::default(),
5375 false,
5376 None
5377 )
5378 .unwrap(),
5379 cx
5380 )
5381 .await
5382 .unwrap(),
5383 HashMap::from_iter([
5384 (path!("dir/one.rs").to_string(), vec![8..12]),
5385 (path!("dir/two.rs").to_string(), vec![8..12]),
5386 ]),
5387 "Rust only search should give only Rust files"
5388 );
5389
5390 assert_eq!(
5391 search(
5392 &project,
5393 SearchQuery::text(
5394 search_query,
5395 false,
5396 true,
5397 false,
5398 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5399 Default::default(),
5400 false,
5401 None,
5402 )
5403 .unwrap(),
5404 cx
5405 )
5406 .await
5407 .unwrap(),
5408 HashMap::from_iter([
5409 (path!("dir/one.ts").to_string(), vec![14..18]),
5410 (path!("dir/two.ts").to_string(), vec![14..18]),
5411 ]),
5412 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5413 );
5414
5415 assert_eq!(
5416 search(
5417 &project,
5418 SearchQuery::text(
5419 search_query,
5420 false,
5421 true,
5422 false,
5423 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5424 .unwrap(),
5425 Default::default(),
5426 false,
5427 None,
5428 )
5429 .unwrap(),
5430 cx
5431 )
5432 .await
5433 .unwrap(),
5434 HashMap::from_iter([
5435 (path!("dir/two.ts").to_string(), vec![14..18]),
5436 (path!("dir/one.rs").to_string(), vec![8..12]),
5437 (path!("dir/one.ts").to_string(), vec![14..18]),
5438 (path!("dir/two.rs").to_string(), vec![8..12]),
5439 ]),
5440 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5441 );
5442}
5443
5444#[gpui::test]
5445async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5446 init_test(cx);
5447
5448 let search_query = "file";
5449
5450 let fs = FakeFs::new(cx.executor());
5451 fs.insert_tree(
5452 path!("/dir"),
5453 json!({
5454 "one.rs": r#"// Rust file one"#,
5455 "one.ts": r#"// TypeScript file one"#,
5456 "two.rs": r#"// Rust file two"#,
5457 "two.ts": r#"// TypeScript file two"#,
5458 }),
5459 )
5460 .await;
5461 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5462
5463 assert_eq!(
5464 search(
5465 &project,
5466 SearchQuery::text(
5467 search_query,
5468 false,
5469 true,
5470 false,
5471 Default::default(),
5472 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5473 false,
5474 None,
5475 )
5476 .unwrap(),
5477 cx
5478 )
5479 .await
5480 .unwrap(),
5481 HashMap::from_iter([
5482 (path!("dir/one.rs").to_string(), vec![8..12]),
5483 (path!("dir/one.ts").to_string(), vec![14..18]),
5484 (path!("dir/two.rs").to_string(), vec![8..12]),
5485 (path!("dir/two.ts").to_string(), vec![14..18]),
5486 ]),
5487 "If no exclusions match, all files should be returned"
5488 );
5489
5490 assert_eq!(
5491 search(
5492 &project,
5493 SearchQuery::text(
5494 search_query,
5495 false,
5496 true,
5497 false,
5498 Default::default(),
5499 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5500 false,
5501 None,
5502 )
5503 .unwrap(),
5504 cx
5505 )
5506 .await
5507 .unwrap(),
5508 HashMap::from_iter([
5509 (path!("dir/one.ts").to_string(), vec![14..18]),
5510 (path!("dir/two.ts").to_string(), vec![14..18]),
5511 ]),
5512 "Rust exclusion search should give only TypeScript files"
5513 );
5514
5515 assert_eq!(
5516 search(
5517 &project,
5518 SearchQuery::text(
5519 search_query,
5520 false,
5521 true,
5522 false,
5523 Default::default(),
5524 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5525 false,
5526 None,
5527 )
5528 .unwrap(),
5529 cx
5530 )
5531 .await
5532 .unwrap(),
5533 HashMap::from_iter([
5534 (path!("dir/one.rs").to_string(), vec![8..12]),
5535 (path!("dir/two.rs").to_string(), vec![8..12]),
5536 ]),
5537 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5538 );
5539
5540 assert!(
5541 search(
5542 &project,
5543 SearchQuery::text(
5544 search_query,
5545 false,
5546 true,
5547 false,
5548 Default::default(),
5549 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5550 .unwrap(),
5551 false,
5552 None,
5553 )
5554 .unwrap(),
5555 cx
5556 )
5557 .await
5558 .unwrap()
5559 .is_empty(),
5560 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5561 );
5562}
5563
5564#[gpui::test]
5565async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5566 init_test(cx);
5567
5568 let search_query = "file";
5569
5570 let fs = FakeFs::new(cx.executor());
5571 fs.insert_tree(
5572 path!("/dir"),
5573 json!({
5574 "one.rs": r#"// Rust file one"#,
5575 "one.ts": r#"// TypeScript file one"#,
5576 "two.rs": r#"// Rust file two"#,
5577 "two.ts": r#"// TypeScript file two"#,
5578 }),
5579 )
5580 .await;
5581
5582 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5583 let _buffer = project.update(cx, |project, cx| {
5584 let buffer = project.create_local_buffer("file", None, cx);
5585 project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
5586 buffer
5587 });
5588
5589 assert_eq!(
5590 search(
5591 &project,
5592 SearchQuery::text(
5593 search_query,
5594 false,
5595 true,
5596 false,
5597 Default::default(),
5598 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5599 false,
5600 None,
5601 )
5602 .unwrap(),
5603 cx
5604 )
5605 .await
5606 .unwrap(),
5607 HashMap::from_iter([
5608 (path!("dir/one.rs").to_string(), vec![8..12]),
5609 (path!("dir/one.ts").to_string(), vec![14..18]),
5610 (path!("dir/two.rs").to_string(), vec![8..12]),
5611 (path!("dir/two.ts").to_string(), vec![14..18]),
5612 ]),
5613 "If no exclusions match, all files should be returned"
5614 );
5615
5616 assert_eq!(
5617 search(
5618 &project,
5619 SearchQuery::text(
5620 search_query,
5621 false,
5622 true,
5623 false,
5624 Default::default(),
5625 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5626 false,
5627 None,
5628 )
5629 .unwrap(),
5630 cx
5631 )
5632 .await
5633 .unwrap(),
5634 HashMap::from_iter([
5635 (path!("dir/one.ts").to_string(), vec![14..18]),
5636 (path!("dir/two.ts").to_string(), vec![14..18]),
5637 ]),
5638 "Rust exclusion search should give only TypeScript files"
5639 );
5640
5641 assert_eq!(
5642 search(
5643 &project,
5644 SearchQuery::text(
5645 search_query,
5646 false,
5647 true,
5648 false,
5649 Default::default(),
5650 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5651 false,
5652 None,
5653 )
5654 .unwrap(),
5655 cx
5656 )
5657 .await
5658 .unwrap(),
5659 HashMap::from_iter([
5660 (path!("dir/one.rs").to_string(), vec![8..12]),
5661 (path!("dir/two.rs").to_string(), vec![8..12]),
5662 ]),
5663 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5664 );
5665
5666 assert!(
5667 search(
5668 &project,
5669 SearchQuery::text(
5670 search_query,
5671 false,
5672 true,
5673 false,
5674 Default::default(),
5675 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5676 .unwrap(),
5677 false,
5678 None,
5679 )
5680 .unwrap(),
5681 cx
5682 )
5683 .await
5684 .unwrap()
5685 .is_empty(),
5686 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5687 );
5688}
5689
5690#[gpui::test]
5691async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5692 init_test(cx);
5693
5694 let search_query = "file";
5695
5696 let fs = FakeFs::new(cx.executor());
5697 fs.insert_tree(
5698 path!("/dir"),
5699 json!({
5700 "one.rs": r#"// Rust file one"#,
5701 "one.ts": r#"// TypeScript file one"#,
5702 "two.rs": r#"// Rust file two"#,
5703 "two.ts": r#"// TypeScript file two"#,
5704 }),
5705 )
5706 .await;
5707 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5708
5709 assert!(
5710 search(
5711 &project,
5712 SearchQuery::text(
5713 search_query,
5714 false,
5715 true,
5716 false,
5717 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5718 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5719 false,
5720 None,
5721 )
5722 .unwrap(),
5723 cx
5724 )
5725 .await
5726 .unwrap()
5727 .is_empty(),
5728 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5729 );
5730
5731 assert!(
5732 search(
5733 &project,
5734 SearchQuery::text(
5735 search_query,
5736 false,
5737 true,
5738 false,
5739 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5740 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5741 false,
5742 None,
5743 )
5744 .unwrap(),
5745 cx
5746 )
5747 .await
5748 .unwrap()
5749 .is_empty(),
5750 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5751 );
5752
5753 assert!(
5754 search(
5755 &project,
5756 SearchQuery::text(
5757 search_query,
5758 false,
5759 true,
5760 false,
5761 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5762 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5763 false,
5764 None,
5765 )
5766 .unwrap(),
5767 cx
5768 )
5769 .await
5770 .unwrap()
5771 .is_empty(),
5772 "Non-matching inclusions and exclusions should not change that."
5773 );
5774
5775 assert_eq!(
5776 search(
5777 &project,
5778 SearchQuery::text(
5779 search_query,
5780 false,
5781 true,
5782 false,
5783 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5784 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5785 false,
5786 None,
5787 )
5788 .unwrap(),
5789 cx
5790 )
5791 .await
5792 .unwrap(),
5793 HashMap::from_iter([
5794 (path!("dir/one.ts").to_string(), vec![14..18]),
5795 (path!("dir/two.ts").to_string(), vec![14..18]),
5796 ]),
5797 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5798 );
5799}
5800
5801#[gpui::test]
5802async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5803 init_test(cx);
5804
5805 let fs = FakeFs::new(cx.executor());
5806 fs.insert_tree(
5807 path!("/worktree-a"),
5808 json!({
5809 "haystack.rs": r#"// NEEDLE"#,
5810 "haystack.ts": r#"// NEEDLE"#,
5811 }),
5812 )
5813 .await;
5814 fs.insert_tree(
5815 path!("/worktree-b"),
5816 json!({
5817 "haystack.rs": r#"// NEEDLE"#,
5818 "haystack.ts": r#"// NEEDLE"#,
5819 }),
5820 )
5821 .await;
5822
5823 let project = Project::test(
5824 fs.clone(),
5825 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5826 cx,
5827 )
5828 .await;
5829
5830 assert_eq!(
5831 search(
5832 &project,
5833 SearchQuery::text(
5834 "NEEDLE",
5835 false,
5836 true,
5837 false,
5838 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5839 Default::default(),
5840 true,
5841 None,
5842 )
5843 .unwrap(),
5844 cx
5845 )
5846 .await
5847 .unwrap(),
5848 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5849 "should only return results from included worktree"
5850 );
5851 assert_eq!(
5852 search(
5853 &project,
5854 SearchQuery::text(
5855 "NEEDLE",
5856 false,
5857 true,
5858 false,
5859 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5860 Default::default(),
5861 true,
5862 None,
5863 )
5864 .unwrap(),
5865 cx
5866 )
5867 .await
5868 .unwrap(),
5869 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5870 "should only return results from included worktree"
5871 );
5872
5873 assert_eq!(
5874 search(
5875 &project,
5876 SearchQuery::text(
5877 "NEEDLE",
5878 false,
5879 true,
5880 false,
5881 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5882 Default::default(),
5883 false,
5884 None,
5885 )
5886 .unwrap(),
5887 cx
5888 )
5889 .await
5890 .unwrap(),
5891 HashMap::from_iter([
5892 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5893 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5894 ]),
5895 "should return results from both worktrees"
5896 );
5897}
5898
5899#[gpui::test]
5900async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5901 init_test(cx);
5902
5903 let fs = FakeFs::new(cx.background_executor.clone());
5904 fs.insert_tree(
5905 path!("/dir"),
5906 json!({
5907 ".git": {},
5908 ".gitignore": "**/target\n/node_modules\n",
5909 "target": {
5910 "index.txt": "index_key:index_value"
5911 },
5912 "node_modules": {
5913 "eslint": {
5914 "index.ts": "const eslint_key = 'eslint value'",
5915 "package.json": r#"{ "some_key": "some value" }"#,
5916 },
5917 "prettier": {
5918 "index.ts": "const prettier_key = 'prettier value'",
5919 "package.json": r#"{ "other_key": "other value" }"#,
5920 },
5921 },
5922 "package.json": r#"{ "main_key": "main value" }"#,
5923 }),
5924 )
5925 .await;
5926 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5927
5928 let query = "key";
5929 assert_eq!(
5930 search(
5931 &project,
5932 SearchQuery::text(
5933 query,
5934 false,
5935 false,
5936 false,
5937 Default::default(),
5938 Default::default(),
5939 false,
5940 None,
5941 )
5942 .unwrap(),
5943 cx
5944 )
5945 .await
5946 .unwrap(),
5947 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
5948 "Only one non-ignored file should have the query"
5949 );
5950
5951 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5952 assert_eq!(
5953 search(
5954 &project,
5955 SearchQuery::text(
5956 query,
5957 false,
5958 false,
5959 true,
5960 Default::default(),
5961 Default::default(),
5962 false,
5963 None,
5964 )
5965 .unwrap(),
5966 cx
5967 )
5968 .await
5969 .unwrap(),
5970 HashMap::from_iter([
5971 (path!("dir/package.json").to_string(), vec![8..11]),
5972 (path!("dir/target/index.txt").to_string(), vec![6..9]),
5973 (
5974 path!("dir/node_modules/prettier/package.json").to_string(),
5975 vec![9..12]
5976 ),
5977 (
5978 path!("dir/node_modules/prettier/index.ts").to_string(),
5979 vec![15..18]
5980 ),
5981 (
5982 path!("dir/node_modules/eslint/index.ts").to_string(),
5983 vec![13..16]
5984 ),
5985 (
5986 path!("dir/node_modules/eslint/package.json").to_string(),
5987 vec![8..11]
5988 ),
5989 ]),
5990 "Unrestricted search with ignored directories should find every file with the query"
5991 );
5992
5993 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5994 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5995 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5996 assert_eq!(
5997 search(
5998 &project,
5999 SearchQuery::text(
6000 query,
6001 false,
6002 false,
6003 true,
6004 files_to_include,
6005 files_to_exclude,
6006 false,
6007 None,
6008 )
6009 .unwrap(),
6010 cx
6011 )
6012 .await
6013 .unwrap(),
6014 HashMap::from_iter([(
6015 path!("dir/node_modules/prettier/package.json").to_string(),
6016 vec![9..12]
6017 )]),
6018 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6019 );
6020}
6021
6022#[gpui::test]
6023async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6024 init_test(cx);
6025
6026 let fs = FakeFs::new(cx.executor());
6027 fs.insert_tree(
6028 path!("/dir"),
6029 json!({
6030 "one.rs": "// ПРИВЕТ? привет!",
6031 "two.rs": "// ПРИВЕТ.",
6032 "three.rs": "// привет",
6033 }),
6034 )
6035 .await;
6036 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6037
6038 let unicode_case_sensitive_query = SearchQuery::text(
6039 "привет",
6040 false,
6041 true,
6042 false,
6043 Default::default(),
6044 Default::default(),
6045 false,
6046 None,
6047 );
6048 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6049 assert_eq!(
6050 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6051 .await
6052 .unwrap(),
6053 HashMap::from_iter([
6054 (path!("dir/one.rs").to_string(), vec![17..29]),
6055 (path!("dir/three.rs").to_string(), vec![3..15]),
6056 ])
6057 );
6058
6059 let unicode_case_insensitive_query = SearchQuery::text(
6060 "привет",
6061 false,
6062 false,
6063 false,
6064 Default::default(),
6065 Default::default(),
6066 false,
6067 None,
6068 );
6069 assert_matches!(
6070 unicode_case_insensitive_query,
6071 Ok(SearchQuery::Regex { .. })
6072 );
6073 assert_eq!(
6074 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6075 .await
6076 .unwrap(),
6077 HashMap::from_iter([
6078 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6079 (path!("dir/two.rs").to_string(), vec![3..15]),
6080 (path!("dir/three.rs").to_string(), vec![3..15]),
6081 ])
6082 );
6083
6084 assert_eq!(
6085 search(
6086 &project,
6087 SearchQuery::text(
6088 "привет.",
6089 false,
6090 false,
6091 false,
6092 Default::default(),
6093 Default::default(),
6094 false,
6095 None,
6096 )
6097 .unwrap(),
6098 cx
6099 )
6100 .await
6101 .unwrap(),
6102 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6103 );
6104}
6105
6106#[gpui::test]
6107async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6108 init_test(cx);
6109
6110 let fs = FakeFs::new(cx.executor());
6111 fs.insert_tree(
6112 "/one/two",
6113 json!({
6114 "three": {
6115 "a.txt": "",
6116 "four": {}
6117 },
6118 "c.rs": ""
6119 }),
6120 )
6121 .await;
6122
6123 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6124 project
6125 .update(cx, |project, cx| {
6126 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6127 project.create_entry((id, "b.."), true, cx)
6128 })
6129 .await
6130 .unwrap()
6131 .into_included()
6132 .unwrap();
6133
6134 // Can't create paths outside the project
6135 let result = project
6136 .update(cx, |project, cx| {
6137 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6138 project.create_entry((id, "../../boop"), true, cx)
6139 })
6140 .await;
6141 assert!(result.is_err());
6142
6143 // Can't create paths with '..'
6144 let result = project
6145 .update(cx, |project, cx| {
6146 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6147 project.create_entry((id, "four/../beep"), true, cx)
6148 })
6149 .await;
6150 assert!(result.is_err());
6151
6152 assert_eq!(
6153 fs.paths(true),
6154 vec![
6155 PathBuf::from(path!("/")),
6156 PathBuf::from(path!("/one")),
6157 PathBuf::from(path!("/one/two")),
6158 PathBuf::from(path!("/one/two/c.rs")),
6159 PathBuf::from(path!("/one/two/three")),
6160 PathBuf::from(path!("/one/two/three/a.txt")),
6161 PathBuf::from(path!("/one/two/three/b..")),
6162 PathBuf::from(path!("/one/two/three/four")),
6163 ]
6164 );
6165
6166 // And we cannot open buffers with '..'
6167 let result = project
6168 .update(cx, |project, cx| {
6169 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6170 project.open_buffer((id, "../c.rs"), cx)
6171 })
6172 .await;
6173 assert!(result.is_err())
6174}
6175
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // A hover request must fan out to every server attached to the buffer's
    // language that advertises hover support, skip servers without the
    // capability, and drop empty/None responses from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for the same language: the first three advertise hover
    // support, the last one deliberately does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name,
    // before issuing the hover request below.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with a real hover payload.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one must never be queried at all.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then wait until every hover-capable server has been hit.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two non-empty responses survive; ESLintServer's None is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6330
6331#[gpui::test]
6332async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6333 init_test(cx);
6334
6335 let fs = FakeFs::new(cx.executor());
6336 fs.insert_tree(
6337 path!("/dir"),
6338 json!({
6339 "a.ts": "a",
6340 }),
6341 )
6342 .await;
6343
6344 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6345
6346 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6347 language_registry.add(typescript_lang());
6348 let mut fake_language_servers = language_registry.register_fake_lsp(
6349 "TypeScript",
6350 FakeLspAdapter {
6351 capabilities: lsp::ServerCapabilities {
6352 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6353 ..lsp::ServerCapabilities::default()
6354 },
6355 ..FakeLspAdapter::default()
6356 },
6357 );
6358
6359 let (buffer, _handle) = project
6360 .update(cx, |p, cx| {
6361 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6362 })
6363 .await
6364 .unwrap();
6365 cx.executor().run_until_parked();
6366
6367 let fake_server = fake_language_servers
6368 .next()
6369 .await
6370 .expect("failed to get the language server");
6371
6372 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6373 move |_, _| async move {
6374 Ok(Some(lsp::Hover {
6375 contents: lsp::HoverContents::Array(vec![
6376 lsp::MarkedString::String("".to_string()),
6377 lsp::MarkedString::String(" ".to_string()),
6378 lsp::MarkedString::String("\n\n\n".to_string()),
6379 ]),
6380 range: None,
6381 }))
6382 },
6383 );
6384
6385 let hover_task = project.update(cx, |project, cx| {
6386 project.hover(&buffer, Point::new(0, 0), cx)
6387 });
6388 let () = request_handled
6389 .next()
6390 .await
6391 .expect("All hover requests should have been triggered");
6392 assert_eq!(
6393 Vec::<String>::new(),
6394 hover_task
6395 .await
6396 .into_iter()
6397 .flatten()
6398 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6399 .sorted()
6400 .collect::<Vec<_>>(),
6401 "Empty hover parts should be ignored"
6402 );
6403}
6404
6405#[gpui::test]
6406async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6407 init_test(cx);
6408
6409 let fs = FakeFs::new(cx.executor());
6410 fs.insert_tree(
6411 path!("/dir"),
6412 json!({
6413 "a.ts": "a",
6414 }),
6415 )
6416 .await;
6417
6418 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6419
6420 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6421 language_registry.add(typescript_lang());
6422 let mut fake_language_servers = language_registry.register_fake_lsp(
6423 "TypeScript",
6424 FakeLspAdapter {
6425 capabilities: lsp::ServerCapabilities {
6426 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6427 ..lsp::ServerCapabilities::default()
6428 },
6429 ..FakeLspAdapter::default()
6430 },
6431 );
6432
6433 let (buffer, _handle) = project
6434 .update(cx, |p, cx| {
6435 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6436 })
6437 .await
6438 .unwrap();
6439 cx.executor().run_until_parked();
6440
6441 let fake_server = fake_language_servers
6442 .next()
6443 .await
6444 .expect("failed to get the language server");
6445
6446 let mut request_handled = fake_server
6447 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6448 Ok(Some(vec![
6449 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6450 title: "organize imports".to_string(),
6451 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6452 ..lsp::CodeAction::default()
6453 }),
6454 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6455 title: "fix code".to_string(),
6456 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6457 ..lsp::CodeAction::default()
6458 }),
6459 ]))
6460 });
6461
6462 let code_actions_task = project.update(cx, |project, cx| {
6463 project.code_actions(
6464 &buffer,
6465 0..buffer.read(cx).len(),
6466 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6467 cx,
6468 )
6469 });
6470
6471 let () = request_handled
6472 .next()
6473 .await
6474 .expect("The code action request should have been triggered");
6475
6476 let code_actions = code_actions_task.await.unwrap().unwrap();
6477 assert_eq!(code_actions.len(), 1);
6478 assert_eq!(
6479 code_actions[0].lsp_action.action_kind(),
6480 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6481 );
6482}
6483
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    // A code-action request must fan out to every server attached to the
    // buffer's language that advertises code-action support, skip servers
    // without the capability, and drop None responses from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    // Four fake servers for the same language: the first three advertise
    // code-action support, the last one deliberately does not.
    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a code-action handler on each started server, keyed by server
    // name, before issuing the request below.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            // These two respond with a real code action.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            // This one is queried but offers no actions.
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one must never be queried at all.
            "NoActionsCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    // Wait until every action-capable server has been hit before collecting.
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Only the two non-empty responses survive; ESLintServer's None is dropped.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title().to_owned())
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
6643
6644#[gpui::test]
6645async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6646 init_test(cx);
6647
6648 let fs = FakeFs::new(cx.executor());
6649 fs.insert_tree(
6650 "/dir",
6651 json!({
6652 "a.rs": "let a = 1;",
6653 "b.rs": "let b = 2;",
6654 "c.rs": "let c = 2;",
6655 }),
6656 )
6657 .await;
6658
6659 let project = Project::test(
6660 fs,
6661 [
6662 "/dir/a.rs".as_ref(),
6663 "/dir/b.rs".as_ref(),
6664 "/dir/c.rs".as_ref(),
6665 ],
6666 cx,
6667 )
6668 .await;
6669
6670 // check the initial state and get the worktrees
6671 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6672 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6673 assert_eq!(worktrees.len(), 3);
6674
6675 let worktree_a = worktrees[0].read(cx);
6676 let worktree_b = worktrees[1].read(cx);
6677 let worktree_c = worktrees[2].read(cx);
6678
6679 // check they start in the right order
6680 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6681 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6682 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6683
6684 (
6685 worktrees[0].clone(),
6686 worktrees[1].clone(),
6687 worktrees[2].clone(),
6688 )
6689 });
6690
6691 // move first worktree to after the second
6692 // [a, b, c] -> [b, a, c]
6693 project
6694 .update(cx, |project, cx| {
6695 let first = worktree_a.read(cx);
6696 let second = worktree_b.read(cx);
6697 project.move_worktree(first.id(), second.id(), cx)
6698 })
6699 .expect("moving first after second");
6700
6701 // check the state after moving
6702 project.update(cx, |project, cx| {
6703 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6704 assert_eq!(worktrees.len(), 3);
6705
6706 let first = worktrees[0].read(cx);
6707 let second = worktrees[1].read(cx);
6708 let third = worktrees[2].read(cx);
6709
6710 // check they are now in the right order
6711 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6712 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6713 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6714 });
6715
6716 // move the second worktree to before the first
6717 // [b, a, c] -> [a, b, c]
6718 project
6719 .update(cx, |project, cx| {
6720 let second = worktree_a.read(cx);
6721 let first = worktree_b.read(cx);
6722 project.move_worktree(first.id(), second.id(), cx)
6723 })
6724 .expect("moving second before first");
6725
6726 // check the state after moving
6727 project.update(cx, |project, cx| {
6728 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6729 assert_eq!(worktrees.len(), 3);
6730
6731 let first = worktrees[0].read(cx);
6732 let second = worktrees[1].read(cx);
6733 let third = worktrees[2].read(cx);
6734
6735 // check they are now in the right order
6736 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6737 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6738 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6739 });
6740
6741 // move the second worktree to after the third
6742 // [a, b, c] -> [a, c, b]
6743 project
6744 .update(cx, |project, cx| {
6745 let second = worktree_b.read(cx);
6746 let third = worktree_c.read(cx);
6747 project.move_worktree(second.id(), third.id(), cx)
6748 })
6749 .expect("moving second after third");
6750
6751 // check the state after moving
6752 project.update(cx, |project, cx| {
6753 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6754 assert_eq!(worktrees.len(), 3);
6755
6756 let first = worktrees[0].read(cx);
6757 let second = worktrees[1].read(cx);
6758 let third = worktrees[2].read(cx);
6759
6760 // check they are now in the right order
6761 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6762 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6763 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6764 });
6765
6766 // move the third worktree to before the second
6767 // [a, c, b] -> [a, b, c]
6768 project
6769 .update(cx, |project, cx| {
6770 let third = worktree_c.read(cx);
6771 let second = worktree_b.read(cx);
6772 project.move_worktree(third.id(), second.id(), cx)
6773 })
6774 .expect("moving third before second");
6775
6776 // check the state after moving
6777 project.update(cx, |project, cx| {
6778 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6779 assert_eq!(worktrees.len(), 3);
6780
6781 let first = worktrees[0].read(cx);
6782 let second = worktrees[1].read(cx);
6783 let third = worktrees[2].read(cx);
6784
6785 // check they are now in the right order
6786 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6787 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6788 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6789 });
6790
6791 // move the first worktree to after the third
6792 // [a, b, c] -> [b, c, a]
6793 project
6794 .update(cx, |project, cx| {
6795 let first = worktree_a.read(cx);
6796 let third = worktree_c.read(cx);
6797 project.move_worktree(first.id(), third.id(), cx)
6798 })
6799 .expect("moving first after third");
6800
6801 // check the state after moving
6802 project.update(cx, |project, cx| {
6803 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6804 assert_eq!(worktrees.len(), 3);
6805
6806 let first = worktrees[0].read(cx);
6807 let second = worktrees[1].read(cx);
6808 let third = worktrees[2].read(cx);
6809
6810 // check they are now in the right order
6811 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6812 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6813 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6814 });
6815
6816 // move the third worktree to before the first
6817 // [b, c, a] -> [a, b, c]
6818 project
6819 .update(cx, |project, cx| {
6820 let third = worktree_a.read(cx);
6821 let first = worktree_b.read(cx);
6822 project.move_worktree(third.id(), first.id(), cx)
6823 })
6824 .expect("moving third before first");
6825
6826 // check the state after moving
6827 project.update(cx, |project, cx| {
6828 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6829 assert_eq!(worktrees.len(), 3);
6830
6831 let first = worktrees[0].read(cx);
6832 let second = worktrees[1].read(cx);
6833 let third = worktrees[2].read(cx);
6834
6835 // check they are now in the right order
6836 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6837 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6838 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6839 });
6840}
6841
6842#[gpui::test]
6843async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6844 init_test(cx);
6845
6846 let staged_contents = r#"
6847 fn main() {
6848 println!("hello world");
6849 }
6850 "#
6851 .unindent();
6852 let file_contents = r#"
6853 // print goodbye
6854 fn main() {
6855 println!("goodbye world");
6856 }
6857 "#
6858 .unindent();
6859
6860 let fs = FakeFs::new(cx.background_executor.clone());
6861 fs.insert_tree(
6862 "/dir",
6863 json!({
6864 ".git": {},
6865 "src": {
6866 "main.rs": file_contents,
6867 }
6868 }),
6869 )
6870 .await;
6871
6872 fs.set_index_for_repo(
6873 Path::new("/dir/.git"),
6874 &[("src/main.rs".into(), staged_contents)],
6875 );
6876
6877 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6878
6879 let buffer = project
6880 .update(cx, |project, cx| {
6881 project.open_local_buffer("/dir/src/main.rs", cx)
6882 })
6883 .await
6884 .unwrap();
6885 let unstaged_diff = project
6886 .update(cx, |project, cx| {
6887 project.open_unstaged_diff(buffer.clone(), cx)
6888 })
6889 .await
6890 .unwrap();
6891
6892 cx.run_until_parked();
6893 unstaged_diff.update(cx, |unstaged_diff, cx| {
6894 let snapshot = buffer.read(cx).snapshot();
6895 assert_hunks(
6896 unstaged_diff.hunks(&snapshot, cx),
6897 &snapshot,
6898 &unstaged_diff.base_text_string().unwrap(),
6899 &[
6900 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6901 (
6902 2..3,
6903 " println!(\"hello world\");\n",
6904 " println!(\"goodbye world\");\n",
6905 DiffHunkStatus::modified_none(),
6906 ),
6907 ],
6908 );
6909 });
6910
6911 let staged_contents = r#"
6912 // print goodbye
6913 fn main() {
6914 }
6915 "#
6916 .unindent();
6917
6918 fs.set_index_for_repo(
6919 Path::new("/dir/.git"),
6920 &[("src/main.rs".into(), staged_contents)],
6921 );
6922
6923 cx.run_until_parked();
6924 unstaged_diff.update(cx, |unstaged_diff, cx| {
6925 let snapshot = buffer.read(cx).snapshot();
6926 assert_hunks(
6927 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6928 &snapshot,
6929 &unstaged_diff.base_text().text(),
6930 &[(
6931 2..3,
6932 "",
6933 " println!(\"goodbye world\");\n",
6934 DiffHunkStatus::added_none(),
6935 )],
6936 );
6937 });
6938}
6939
// Verifies the uncommitted diff (buffer vs. HEAD) for a modified file and a
// deleted file, including how each hunk's secondary status reflects whether
// the change is also present in the index (i.e. staged vs. unstaged).
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both also contain deletion.rs, which is absent from the
    // working tree — i.e. an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) picks up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is only in the buffer (unstaged → has a secondary
    // hunk); the println change is also in the index (fully staged → none).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the deleted hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that no longer
    // lists deletion.rs at all.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the deletion staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7119
// Exercises staging/unstaging individual hunks: optimistic "pending" state,
// the events emitted while the index write is in flight, rollback when the
// index write fails, and batching of multiple concurrent staging operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; working copy deletes "zero" and rewrites lines 2 and 4,
    // yielding three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so emitted events can be asserted.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It too shows as optimistically pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7459
// Regression test: staging hunks while the FS events for earlier index writes
// are still buffered must not lose any staging state. Events are paused and
// flushed one at a time to force the interleaving. The pinned seeds reproduce
// the orderings that originally failed.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; working copy produces three hunks (delete, modify, modify).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. Its state becomes pending because the FS event
    // confirming the index write will not be delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7653
// Randomized stress test: stage/unstage random hunks with random delays (and
// sometimes a deprioritized diff-recalculation task) and assert that the final
// secondary status of every hunk matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation below
    // records the pending status it should leave behind.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let unrelated background work interleave between operations.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status resolves to its final form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7772
7773#[gpui::test]
7774async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7775 init_test(cx);
7776
7777 let committed_contents = r#"
7778 fn main() {
7779 println!("hello from HEAD");
7780 }
7781 "#
7782 .unindent();
7783 let file_contents = r#"
7784 fn main() {
7785 println!("hello from the working copy");
7786 }
7787 "#
7788 .unindent();
7789
7790 let fs = FakeFs::new(cx.background_executor.clone());
7791 fs.insert_tree(
7792 "/dir",
7793 json!({
7794 ".git": {},
7795 "src": {
7796 "main.rs": file_contents,
7797 }
7798 }),
7799 )
7800 .await;
7801
7802 fs.set_head_for_repo(
7803 Path::new("/dir/.git"),
7804 &[("src/main.rs".into(), committed_contents.clone())],
7805 "deadbeef",
7806 );
7807 fs.set_index_for_repo(
7808 Path::new("/dir/.git"),
7809 &[("src/main.rs".into(), committed_contents.clone())],
7810 );
7811
7812 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7813
7814 let buffer = project
7815 .update(cx, |project, cx| {
7816 project.open_local_buffer("/dir/src/main.rs", cx)
7817 })
7818 .await
7819 .unwrap();
7820 let uncommitted_diff = project
7821 .update(cx, |project, cx| {
7822 project.open_uncommitted_diff(buffer.clone(), cx)
7823 })
7824 .await
7825 .unwrap();
7826
7827 cx.run_until_parked();
7828 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7829 let snapshot = buffer.read(cx).snapshot();
7830 assert_hunks(
7831 uncommitted_diff.hunks(&snapshot, cx),
7832 &snapshot,
7833 &uncommitted_diff.base_text_string().unwrap(),
7834 &[(
7835 1..2,
7836 " println!(\"hello from HEAD\");\n",
7837 " println!(\"hello from the working copy\");\n",
7838 DiffHunkStatus {
7839 kind: DiffHunkStatusKind::Modified,
7840 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7841 },
7842 )],
7843 );
7844 });
7845}
7846
// Verifies mapping of project paths to (repository, repo-relative path),
// including nested repositories and repository removal.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // dir1 is a repo containing a nested repo at dir1/deps/dep1; c.txt is
    // outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path));
        // `None` means the path belongs to no repository. Note that the
        // nested repo takes precedence for paths beneath it.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's .git dir makes its files repo-less.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7936
7937#[gpui::test]
7938async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7939 init_test(cx);
7940 let fs = FakeFs::new(cx.background_executor.clone());
7941 fs.insert_tree(
7942 path!("/root"),
7943 json!({
7944 "home": {
7945 ".git": {},
7946 "project": {
7947 "a.txt": "A"
7948 },
7949 },
7950 }),
7951 )
7952 .await;
7953 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7954
7955 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7956 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7957 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7958
7959 project
7960 .update(cx, |project, cx| project.git_scans_complete(cx))
7961 .await;
7962 tree.flush_fs_events(cx).await;
7963
7964 project.read_with(cx, |project, cx| {
7965 let containing = project
7966 .git_store()
7967 .read(cx)
7968 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7969 assert!(containing.is_none());
7970 });
7971
7972 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7973 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7974 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7975 project
7976 .update(cx, |project, cx| project.git_scans_complete(cx))
7977 .await;
7978 tree.flush_fs_events(cx).await;
7979
7980 project.read_with(cx, |project, cx| {
7981 let containing = project
7982 .git_store()
7983 .read(cx)
7984 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7985 assert_eq!(
7986 containing
7987 .unwrap()
7988 .0
7989 .read(cx)
7990 .work_directory_abs_path
7991 .as_ref(),
7992 Path::new(path!("/root/home"))
7993 );
7994 });
7995}
7996
// Integration test against a real git repository (RealFs + git2): verifies
// that cached status entries track modifications, additions, deletions,
// commits, and removal of untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: parking is required for blocking IO on the test thread.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",    // Modified
            "b.txt": "bb",   // Added
            "c.txt": "ccc",  // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should appear in status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, clearing those statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8126
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Verifies two pieces of status postprocessing:
    // 1. a file deleted in the index but still present on disk surfaces as a
    //    combined index-Deleted / worktree-Added ("DA") status, and
    // 2. a nested git repository inside the worktree is excluded from the
    //    outer repository's computed statuses.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    // Wait for fs events and the initial git scan to settle before inspecting state.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both the outer repo and the nested `sub` repo are discovered; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8189
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Opens a project rooted at a subfolder deep inside a repository and checks
    // that statuses are still resolved against the repository's real work
    // directory (an ancestor of the project root).
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the project root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // The visible worktree is two levels below the repository's work directory.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the simulated status should clear the repository's view as well.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8266
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Exercises conflict tracking: a cherry-pick that conflicts should populate
    // `merge_conflicts`, and resolving/committing should clear it again.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch that edits the same file.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Back on main, make a conflicting edit, then cherry-pick the other commit.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself reports an in-progress, conflicted cherry-pick.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8349
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Checks that rewriting .gitignore flips which entries are ignored, and
    // that a newly non-ignored file picks up its staged (Added) status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored states have swapped, and b.txt now shows as staged/Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8417
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Checks that renaming a repository's work directory on disk updates
    // `work_directory_abs_path` while preserving per-file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is never tracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the open project.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8498
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status tracking against a real git repo: initial scan,
    // working-copy edits, commits, reset/stash, .gitignore changes, and
    // directory creation/renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status entries.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed back to its committed content (no status);
        // b.txt was removed from the index (untracked again);
        // e.txt was modified in the working copy.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A freshly created file in a new nested directory is untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status along.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8700
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Checks that adding an invisible (single-file) worktree does not cause
    // additional repositories to be reported by the project.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repository as the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a non-visible worktree for a single file that lives in the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: the outer repo is not reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8762
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Checks ignore handling across rescans: ancestor .gitignore files apply,
    // ignored directories are tracked as ignored, and newly created files pick
    // up the correct (status, is_ignored) combination.
    init_test(cx);
    // Disable file-scan exclusions so even ignored entries are scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    // The worktree root is /root/tree, so /root/.gitignore is an ancestor ignore file.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file (and stage it), plus new ancestor-ignored and
    // ignored-dir files, then verify each one's state after the rescan.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8898
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Checks that linked git worktrees (.git file pointing at
    // .git/worktrees/...) and submodules (.git file pointing at
    // .git/modules/...) are each discovered as distinct repositories, and
    // that git events in them refresh their statuses independently.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, and submodule — are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer resolves to the linked worktree's repository (not the main one).
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk is "B", so it reads as modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9048
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two worktrees that live inside the same git repository should be
    // deduplicated into a single reported repository.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the same repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository is reported, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9095
9096async fn search(
9097 project: &Entity<Project>,
9098 query: SearchQuery,
9099 cx: &mut gpui::TestAppContext,
9100) -> Result<HashMap<String, Vec<Range<usize>>>> {
9101 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9102 let mut results = HashMap::default();
9103 while let Ok(search_result) = search_rx.recv().await {
9104 match search_result {
9105 SearchResult::Buffer { buffer, ranges } => {
9106 results.entry(buffer).or_insert(ranges);
9107 }
9108 SearchResult::LimitReached => {}
9109 }
9110 }
9111 Ok(results
9112 .into_iter()
9113 .map(|(buffer, ranges)| {
9114 buffer.update(cx, |buffer, cx| {
9115 let path = buffer
9116 .file()
9117 .unwrap()
9118 .full_path(cx)
9119 .to_string_lossy()
9120 .to_string();
9121 let ranges = ranges
9122 .into_iter()
9123 .map(|range| range.to_offset(buffer))
9124 .collect::<Vec<_>>();
9125 (path, ranges)
9126 })
9127 })
9128 .collect())
9129}
9130
9131pub fn init_test(cx: &mut gpui::TestAppContext) {
9132 zlog::init_test();
9133
9134 cx.update(|cx| {
9135 let settings_store = SettingsStore::test(cx);
9136 cx.set_global(settings_store);
9137 release_channel::init(SemanticVersion::default(), cx);
9138 language::init(cx);
9139 Project::init_settings(cx);
9140 });
9141}
9142
9143fn json_lang() -> Arc<Language> {
9144 Arc::new(Language::new(
9145 LanguageConfig {
9146 name: "JSON".into(),
9147 matcher: LanguageMatcher {
9148 path_suffixes: vec!["json".to_string()],
9149 ..Default::default()
9150 },
9151 ..Default::default()
9152 },
9153 None,
9154 ))
9155}
9156
9157fn js_lang() -> Arc<Language> {
9158 Arc::new(Language::new(
9159 LanguageConfig {
9160 name: "JavaScript".into(),
9161 matcher: LanguageMatcher {
9162 path_suffixes: vec!["js".to_string()],
9163 ..Default::default()
9164 },
9165 ..Default::default()
9166 },
9167 None,
9168 ))
9169}
9170
9171fn rust_lang() -> Arc<Language> {
9172 Arc::new(Language::new(
9173 LanguageConfig {
9174 name: "Rust".into(),
9175 matcher: LanguageMatcher {
9176 path_suffixes: vec!["rs".to_string()],
9177 ..Default::default()
9178 },
9179 ..Default::default()
9180 },
9181 Some(tree_sitter_rust::LANGUAGE.into()),
9182 ))
9183}
9184
/// Builds a fake "Python" language (no grammar) whose toolchain lister
/// discovers `.venv` directories via the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<Path>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister reports a toolchain for every `.venv` directory found
            // in any ancestor of the queried subroot path.
            let ancestors = subroot_relative_path
                .ancestors()
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Returns the term used in the UI to refer to a toolchain.
        fn term(&self) -> SharedString {
            SharedString::new_static("virtual environment")
        }
        /// Returns the name of the manifest file for this toolchain.
        fn manifest_name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }
        // No activation script is needed for these tests.
        async fn activation_script(&self, _: &Toolchain, _: &dyn Fs) -> Option<String> {
            None
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9247
9248fn typescript_lang() -> Arc<Language> {
9249 Arc::new(Language::new(
9250 LanguageConfig {
9251 name: "TypeScript".into(),
9252 matcher: LanguageMatcher {
9253 path_suffixes: vec!["ts".to_string()],
9254 ..Default::default()
9255 },
9256 ..Default::default()
9257 },
9258 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9259 ))
9260}
9261
9262fn tsx_lang() -> Arc<Language> {
9263 Arc::new(Language::new(
9264 LanguageConfig {
9265 name: "tsx".into(),
9266 matcher: LanguageMatcher {
9267 path_suffixes: vec!["tsx".to_string()],
9268 ..Default::default()
9269 },
9270 ..Default::default()
9271 },
9272 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9273 ))
9274}
9275
9276fn get_all_tasks(
9277 project: &Entity<Project>,
9278 task_contexts: Arc<TaskContexts>,
9279 cx: &mut App,
9280) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9281 let new_tasks = project.update(cx, |project, cx| {
9282 project.task_store.update(cx, |task_store, cx| {
9283 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9284 this.used_and_current_resolved_tasks(task_contexts, cx)
9285 })
9286 })
9287 });
9288
9289 cx.background_spawn(async move {
9290 let (mut old, new) = new_tasks.await;
9291 old.extend(new);
9292 old
9293 })
9294}
9295
9296#[track_caller]
9297fn assert_entry_git_state(
9298 tree: &Worktree,
9299 repository: &Repository,
9300 path: &str,
9301 index_status: Option<StatusCode>,
9302 is_ignored: bool,
9303) {
9304 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9305 let entry = tree
9306 .entry_for_path(path)
9307 .unwrap_or_else(|| panic!("entry {path} not found"));
9308 let status = repository
9309 .status_for_path(&path.into())
9310 .map(|entry| entry.status);
9311 let expected = index_status.map(|index_status| {
9312 TrackedStatus {
9313 index_status,
9314 worktree_status: StatusCode::Unmodified,
9315 }
9316 .into()
9317 });
9318 assert_eq!(
9319 status, expected,
9320 "expected {path} to have git status: {expected:?}"
9321 );
9322 assert_eq!(
9323 entry.is_ignored, is_ignored,
9324 "expected {path} to have is_ignored: {is_ignored}"
9325 );
9326}
9327
9328#[track_caller]
9329fn git_init(path: &Path) -> git2::Repository {
9330 let mut init_opts = RepositoryInitOptions::new();
9331 init_opts.initial_head("main");
9332 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9333}
9334
9335#[track_caller]
9336fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9337 let path = path.as_ref();
9338 let mut index = repo.index().expect("Failed to get index");
9339 index.add_path(path).expect("Failed to add file");
9340 index.write().expect("Failed to write index");
9341}
9342
9343#[track_caller]
9344fn git_remove_index(path: &Path, repo: &git2::Repository) {
9345 let mut index = repo.index().expect("Failed to get index");
9346 index.remove_path(path).expect("Failed to add file");
9347 index.write().expect("Failed to write index");
9348}
9349
9350#[track_caller]
9351fn git_commit(msg: &'static str, repo: &git2::Repository) {
9352 use git2::Signature;
9353
9354 let signature = Signature::now("test", "test@zed.dev").unwrap();
9355 let oid = repo.index().unwrap().write_tree().unwrap();
9356 let tree = repo.find_tree(oid).unwrap();
9357 if let Ok(head) = repo.head() {
9358 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9359
9360 let parent_commit = parent_obj.as_commit().unwrap();
9361
9362 repo.commit(
9363 Some("HEAD"),
9364 &signature,
9365 &signature,
9366 msg,
9367 &tree,
9368 &[parent_commit],
9369 )
9370 .expect("Failed to commit with parent");
9371 } else {
9372 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9373 .expect("Failed to commit");
9374 }
9375}
9376
// Cherry-picks `commit` onto the current HEAD.
// Compiled out via `#[cfg(any())]`; kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9382
9383#[track_caller]
9384fn git_stash(repo: &mut git2::Repository) {
9385 use git2::Signature;
9386
9387 let signature = Signature::now("test", "test@zed.dev").unwrap();
9388 repo.stash_save(&signature, "N/A", None)
9389 .expect("Failed to stash");
9390}
9391
9392#[track_caller]
9393fn git_reset(offset: usize, repo: &git2::Repository) {
9394 let head = repo.head().expect("Couldn't get repo head");
9395 let object = head.peel(git2::ObjectType::Commit).unwrap();
9396 let commit = object.as_commit().unwrap();
9397 let new_head = commit
9398 .parents()
9399 .inspect(|parnet| {
9400 parnet.message();
9401 })
9402 .nth(offset)
9403 .expect("Not enough history");
9404 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9405 .expect("Could not reset");
9406}
9407
// Creates branch `name` pointing at the current HEAD commit.
// Compiled out via `#[cfg(any())]`; kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously said "Failed to commit" (copy-paste);
    // this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9418
// Points HEAD at the reference named `name` and updates the working tree
// to match it.
// Compiled out via `#[cfg(any())]`; kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9425
// Collects the repository's current statuses as a path → status map.
// Compiled out via `#[cfg(any())]`; kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
9435
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories under /root; each is opened as its own
    // worktree below, so lookups must resolve to the correct one.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's root path and id; the assertions below rely
    // on worktrees appearing in the order they were passed to
    // `Project::test` (index 0 = project1, index 1 = project2).
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute path to a file at a worktree root resolves to that
        // worktree and a worktree-relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files resolve with their full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path inside a worktree resolves even if no such file exists on
        // disk (useful for creating new files).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}