1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::RepoPath,
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
25 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister,
26 language_settings::{LanguageSettingsContent, language_settings},
27 tree_sitter_rust, tree_sitter_typescript,
28};
29use lsp::{
30 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
31 Uri, WillRenameFiles, notification::DidRenameFiles,
32};
33use parking_lot::Mutex;
34use paths::{config_dir, global_gitignore_path, tasks_file};
35use postage::stream::Stream as _;
36use pretty_assertions::{assert_eq, assert_matches};
37use rand::{Rng as _, rngs::StdRng};
38use serde_json::json;
39#[cfg(not(windows))]
40use std::os;
41use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
42use task::{ResolvedTask, ShellKind, TaskContext};
43use unindent::Unindent as _;
44use util::{
45 TryFutureExt as _, assert_set_eq, maybe, path,
46 paths::PathMatcher,
47 test::{TempTree, marked_text_offsets},
48 uri,
49};
50use worktree::WorktreeModelHandle as _;
51
52#[gpui::test]
53async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
54 cx.executor().allow_parking();
55
56 let (tx, mut rx) = futures::channel::mpsc::unbounded();
57 let _thread = std::thread::spawn(move || {
58 #[cfg(not(target_os = "windows"))]
59 std::fs::metadata("/tmp").unwrap();
60 #[cfg(target_os = "windows")]
61 std::fs::metadata("C:/Windows").unwrap();
62 std::thread::sleep(Duration::from_millis(1000));
63 tx.unbounded_send(1).unwrap();
64 });
65 rx.next().await.unwrap();
66}
67
68#[gpui::test]
69async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
70 cx.executor().allow_parking();
71
72 let io_task = smol::unblock(move || {
73 println!("sleeping on thread {:?}", std::thread::current().id());
74 std::thread::sleep(Duration::from_millis(10));
75 1
76 });
77
78 let task = cx.foreground_executor().spawn(async move {
79 io_task.await;
80 });
81
82 task.await;
83}
84
85#[cfg(not(windows))]
86#[gpui::test]
87async fn test_symlinks(cx: &mut gpui::TestAppContext) {
88 init_test(cx);
89 cx.executor().allow_parking();
90
91 let dir = TempTree::new(json!({
92 "root": {
93 "apple": "",
94 "banana": {
95 "carrot": {
96 "date": "",
97 "endive": "",
98 }
99 },
100 "fennel": {
101 "grape": "",
102 }
103 }
104 }));
105
106 let root_link_path = dir.path().join("root_link");
107 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
108 os::unix::fs::symlink(
109 dir.path().join("root/fennel"),
110 dir.path().join("root/finnochio"),
111 )
112 .unwrap();
113
114 let project = Project::test(
115 Arc::new(RealFs::new(None, cx.executor())),
116 [root_link_path.as_ref()],
117 cx,
118 )
119 .await;
120
121 project.update(cx, |project, cx| {
122 let tree = project.worktrees(cx).next().unwrap().read(cx);
123 assert_eq!(tree.file_count(), 5);
124 assert_eq!(
125 tree.inode_for_path("fennel/grape"),
126 tree.inode_for_path("finnochio/grape")
127 );
128 });
129}
130
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // End-to-end check of .editorconfig handling: .editorconfig values
    // override .zed/settings.json, a nested .editorconfig overrides the root
    // one, "tab_width" is used when "indent_size" is absent, and values set
    // to "off" fall back to the Zed settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake filesystem used by the project.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings.json
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // The .editorconfig in b/ overrides the .editorconfig in the root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
229
230#[gpui::test]
231async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
232 init_test(cx);
233 cx.update(|cx| {
234 GitHostingProviderRegistry::default_global(cx);
235 git_hosting_providers::init(cx);
236 });
237
238 let fs = FakeFs::new(cx.executor());
239 let str_path = path!("/dir");
240 let path = Path::new(str_path);
241
242 fs.insert_tree(
243 path!("/dir"),
244 json!({
245 ".zed": {
246 "settings.json": r#"{
247 "git_hosting_providers": [
248 {
249 "provider": "gitlab",
250 "base_url": "https://google.com",
251 "name": "foo"
252 }
253 ]
254 }"#
255 },
256 }),
257 )
258 .await;
259
260 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
261 let (_worktree, _) =
262 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
263 cx.executor().run_until_parked();
264
265 cx.update(|cx| {
266 let provider = GitHostingProviderRegistry::global(cx);
267 assert!(
268 provider
269 .list_hosting_providers()
270 .into_iter()
271 .any(|provider| provider.name() == "foo")
272 );
273 });
274
275 fs.atomic_write(
276 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
277 "{}".into(),
278 )
279 .await
280 .unwrap();
281
282 cx.run_until_parked();
283
284 cx.update(|cx| {
285 let provider = GitHostingProviderRegistry::global(cx);
286 assert!(
287 !provider
288 .list_hosting_providers()
289 .into_iter()
290 .any(|provider| provider.name() == "foo")
291 );
292 });
293}
294
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Checks per-directory .zed/settings.json and .zed/tasks.json handling:
    // nested settings override the root ones, tasks from both directories are
    // listed, a recently-scheduled task moves to the front of the list, and a
    // global tasks.json contributes additional entries at the end.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                // NOTE: the trailing comma inside the JSON array below is in
                // the fixture on purpose — presumably to exercise lenient
                // parsing of tasks.json.
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the tasks declared in the root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings for files under b/ come from the nested .zed directory.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        // Flatten each task into (source kind, label, args, env) for comparison.
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    // The id_base embeds the platform-specific path separator.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled, and install an
    // additional file-based global tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task is now listed first; the global task is appended last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
503
504#[gpui::test]
505async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
506 init_test(cx);
507 TaskStore::init(None);
508
509 let fs = FakeFs::new(cx.executor());
510 fs.insert_tree(
511 path!("/dir"),
512 json!({
513 ".zed": {
514 "tasks.json": r#"[{
515 "label": "test worktree root",
516 "command": "echo $ZED_WORKTREE_ROOT"
517 }]"#,
518 },
519 "a": {
520 "a.rs": "fn a() {\n A\n}"
521 },
522 }),
523 )
524 .await;
525
526 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
527 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
528
529 cx.executor().run_until_parked();
530 let worktree_id = cx.update(|cx| {
531 project.update(cx, |project, cx| {
532 project.worktrees(cx).next().unwrap().read(cx).id()
533 })
534 });
535
536 let active_non_worktree_item_tasks = cx
537 .update(|cx| {
538 get_all_tasks(
539 &project,
540 Arc::new(TaskContexts {
541 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
542 active_worktree_context: None,
543 other_worktree_contexts: Vec::new(),
544 lsp_task_sources: HashMap::default(),
545 latest_selection: None,
546 }),
547 cx,
548 )
549 })
550 .await;
551 assert!(
552 active_non_worktree_item_tasks.is_empty(),
553 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
554 );
555
556 let active_worktree_tasks = cx
557 .update(|cx| {
558 get_all_tasks(
559 &project,
560 Arc::new(TaskContexts {
561 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
562 active_worktree_context: Some((worktree_id, {
563 let mut worktree_context = TaskContext::default();
564 worktree_context
565 .task_variables
566 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
567 worktree_context
568 })),
569 other_worktree_contexts: Vec::new(),
570 lsp_task_sources: HashMap::default(),
571 latest_selection: None,
572 }),
573 cx,
574 )
575 })
576 .await;
577 assert_eq!(
578 active_worktree_tasks
579 .into_iter()
580 .map(|(source_kind, task)| {
581 let resolved = task.resolved;
582 (source_kind, resolved.command.unwrap())
583 })
584 .collect::<Vec<_>>(),
585 vec![(
586 TaskSourceKind::Worktree {
587 id: worktree_id,
588 directory_in_worktree: PathBuf::from(path!(".zed")),
589 id_base: if cfg!(windows) {
590 "local worktree tasks from directory \".zed\"".into()
591 } else {
592 "local worktree tasks from directory \".zed\"".into()
593 },
594 },
595 "echo /dir".to_string(),
596 )]
597 );
598}
599
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Manifest provider that roots a (sub)project at the nearest ancestor
    // directory containing a pyproject.toml file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<Path>> {
            // Walk up at most `depth` ancestors looking for a pyproject.toml.
            for path in path.ancestors().take(depth) {
                let p = path.join("pyproject.toml");
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects in one worktree, each with its own
    // pyproject.toml (and thus its own rooting point for the "ty" server).
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no per-project toolchain distinguishes the two yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b via its pyproject.toml.
    assert_eq!(root_path.as_ref(), Path::new("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
801
802#[gpui::test]
803async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
804 init_test(cx);
805
806 let fs = FakeFs::new(cx.executor());
807 fs.insert_tree(
808 path!("/dir"),
809 json!({
810 "test.rs": "const A: i32 = 1;",
811 "test2.rs": "",
812 "Cargo.toml": "a = 1",
813 "package.json": "{\"a\": 1}",
814 }),
815 )
816 .await;
817
818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
819 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
820
821 let mut fake_rust_servers = language_registry.register_fake_lsp(
822 "Rust",
823 FakeLspAdapter {
824 name: "the-rust-language-server",
825 capabilities: lsp::ServerCapabilities {
826 completion_provider: Some(lsp::CompletionOptions {
827 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
828 ..Default::default()
829 }),
830 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
831 lsp::TextDocumentSyncOptions {
832 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
833 ..Default::default()
834 },
835 )),
836 ..Default::default()
837 },
838 ..Default::default()
839 },
840 );
841 let mut fake_json_servers = language_registry.register_fake_lsp(
842 "JSON",
843 FakeLspAdapter {
844 name: "the-json-language-server",
845 capabilities: lsp::ServerCapabilities {
846 completion_provider: Some(lsp::CompletionOptions {
847 trigger_characters: Some(vec![":".to_string()]),
848 ..Default::default()
849 }),
850 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
851 lsp::TextDocumentSyncOptions {
852 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
853 ..Default::default()
854 },
855 )),
856 ..Default::default()
857 },
858 ..Default::default()
859 },
860 );
861
862 // Open a buffer without an associated language server.
863 let (toml_buffer, _handle) = project
864 .update(cx, |project, cx| {
865 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
866 })
867 .await
868 .unwrap();
869
870 // Open a buffer with an associated language server before the language for it has been loaded.
871 let (rust_buffer, _handle2) = project
872 .update(cx, |project, cx| {
873 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
874 })
875 .await
876 .unwrap();
877 rust_buffer.update(cx, |buffer, _| {
878 assert_eq!(buffer.language().map(|l| l.name()), None);
879 });
880
881 // Now we add the languages to the project, and ensure they get assigned to all
882 // the relevant open buffers.
883 language_registry.add(json_lang());
884 language_registry.add(rust_lang());
885 cx.executor().run_until_parked();
886 rust_buffer.update(cx, |buffer, _| {
887 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
888 });
889
890 // A server is started up, and it is notified about Rust files.
891 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
892 assert_eq!(
893 fake_rust_server
894 .receive_notification::<lsp::notification::DidOpenTextDocument>()
895 .await
896 .text_document,
897 lsp::TextDocumentItem {
898 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
899 version: 0,
900 text: "const A: i32 = 1;".to_string(),
901 language_id: "rust".to_string(),
902 }
903 );
904
905 // The buffer is configured based on the language server's capabilities.
906 rust_buffer.update(cx, |buffer, _| {
907 assert_eq!(
908 buffer
909 .completion_triggers()
910 .iter()
911 .cloned()
912 .collect::<Vec<_>>(),
913 &[".".to_string(), "::".to_string()]
914 );
915 });
916 toml_buffer.update(cx, |buffer, _| {
917 assert!(buffer.completion_triggers().is_empty());
918 });
919
920 // Edit a buffer. The changes are reported to the language server.
921 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
922 assert_eq!(
923 fake_rust_server
924 .receive_notification::<lsp::notification::DidChangeTextDocument>()
925 .await
926 .text_document,
927 lsp::VersionedTextDocumentIdentifier::new(
928 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
929 1
930 )
931 );
932
933 // Open a third buffer with a different associated language server.
934 let (json_buffer, _json_handle) = project
935 .update(cx, |project, cx| {
936 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
937 })
938 .await
939 .unwrap();
940
941 // A json language server is started up and is only notified about the json buffer.
942 let mut fake_json_server = fake_json_servers.next().await.unwrap();
943 assert_eq!(
944 fake_json_server
945 .receive_notification::<lsp::notification::DidOpenTextDocument>()
946 .await
947 .text_document,
948 lsp::TextDocumentItem {
949 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
950 version: 0,
951 text: "{\"a\": 1}".to_string(),
952 language_id: "json".to_string(),
953 }
954 );
955
956 // This buffer is configured based on the second language server's
957 // capabilities.
958 json_buffer.update(cx, |buffer, _| {
959 assert_eq!(
960 buffer
961 .completion_triggers()
962 .iter()
963 .cloned()
964 .collect::<Vec<_>>(),
965 &[":".to_string()]
966 );
967 });
968
969 // When opening another buffer whose language server is already running,
970 // it is also configured based on the existing language server's capabilities.
971 let (rust_buffer2, _handle4) = project
972 .update(cx, |project, cx| {
973 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
974 })
975 .await
976 .unwrap();
977 rust_buffer2.update(cx, |buffer, _| {
978 assert_eq!(
979 buffer
980 .completion_triggers()
981 .iter()
982 .cloned()
983 .collect::<Vec<_>>(),
984 &[".".to_string(), "::".to_string()]
985 );
986 });
987
988 // Changes are reported only to servers matching the buffer's language.
989 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
990 rust_buffer2.update(cx, |buffer, cx| {
991 buffer.edit([(0..0, "let x = 1;")], None, cx)
992 });
993 assert_eq!(
994 fake_rust_server
995 .receive_notification::<lsp::notification::DidChangeTextDocument>()
996 .await
997 .text_document,
998 lsp::VersionedTextDocumentIdentifier::new(
999 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1000 1
1001 )
1002 );
1003
1004 // Save notifications are reported to all servers.
1005 project
1006 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1007 .await
1008 .unwrap();
1009 assert_eq!(
1010 fake_rust_server
1011 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1012 .await
1013 .text_document,
1014 lsp::TextDocumentIdentifier::new(
1015 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1016 )
1017 );
1018 assert_eq!(
1019 fake_json_server
1020 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1021 .await
1022 .text_document,
1023 lsp::TextDocumentIdentifier::new(
1024 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1025 )
1026 );
1027
1028 // Renames are reported only to servers matching the buffer's language.
1029 fs.rename(
1030 Path::new(path!("/dir/test2.rs")),
1031 Path::new(path!("/dir/test3.rs")),
1032 Default::default(),
1033 )
1034 .await
1035 .unwrap();
1036 assert_eq!(
1037 fake_rust_server
1038 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1039 .await
1040 .text_document,
1041 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1042 );
1043 assert_eq!(
1044 fake_rust_server
1045 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1046 .await
1047 .text_document,
1048 lsp::TextDocumentItem {
1049 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1050 version: 0,
1051 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1052 language_id: "rust".to_string(),
1053 },
1054 );
1055
1056 rust_buffer2.update(cx, |buffer, cx| {
1057 buffer.update_diagnostics(
1058 LanguageServerId(0),
1059 DiagnosticSet::from_sorted_entries(
1060 vec![DiagnosticEntry {
1061 diagnostic: Default::default(),
1062 range: Anchor::MIN..Anchor::MAX,
1063 }],
1064 &buffer.snapshot(),
1065 ),
1066 cx,
1067 );
1068 assert_eq!(
1069 buffer
1070 .snapshot()
1071 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1072 .count(),
1073 1
1074 );
1075 });
1076
1077 // When the rename changes the extension of the file, the buffer gets closed on the old
1078 // language server and gets opened on the new one.
1079 fs.rename(
1080 Path::new(path!("/dir/test3.rs")),
1081 Path::new(path!("/dir/test3.json")),
1082 Default::default(),
1083 )
1084 .await
1085 .unwrap();
1086 assert_eq!(
1087 fake_rust_server
1088 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1089 .await
1090 .text_document,
1091 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1092 );
1093 assert_eq!(
1094 fake_json_server
1095 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1096 .await
1097 .text_document,
1098 lsp::TextDocumentItem {
1099 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1100 version: 0,
1101 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1102 language_id: "json".to_string(),
1103 },
1104 );
1105
1106 // We clear the diagnostics, since the language has changed.
1107 rust_buffer2.update(cx, |buffer, _| {
1108 assert_eq!(
1109 buffer
1110 .snapshot()
1111 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1112 .count(),
1113 0
1114 );
1115 });
1116
1117 // The renamed file's version resets after changing language server.
1118 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1119 assert_eq!(
1120 fake_json_server
1121 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1122 .await
1123 .text_document,
1124 lsp::VersionedTextDocumentIdentifier::new(
1125 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1126 1
1127 )
1128 );
1129
1130 // Restart language servers
1131 project.update(cx, |project, cx| {
1132 project.restart_language_servers_for_buffers(
1133 vec![rust_buffer.clone(), json_buffer.clone()],
1134 HashSet::default(),
1135 cx,
1136 );
1137 });
1138
1139 let mut rust_shutdown_requests = fake_rust_server
1140 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1141 let mut json_shutdown_requests = fake_json_server
1142 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1143 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1144
1145 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1146 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1147
1148 // Ensure rust document is reopened in new rust language server
1149 assert_eq!(
1150 fake_rust_server
1151 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1152 .await
1153 .text_document,
1154 lsp::TextDocumentItem {
1155 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1156 version: 0,
1157 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1158 language_id: "rust".to_string(),
1159 }
1160 );
1161
1162 // Ensure json documents are reopened in new json language server
1163 assert_set_eq!(
1164 [
1165 fake_json_server
1166 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1167 .await
1168 .text_document,
1169 fake_json_server
1170 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1171 .await
1172 .text_document,
1173 ],
1174 [
1175 lsp::TextDocumentItem {
1176 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1177 version: 0,
1178 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1179 language_id: "json".to_string(),
1180 },
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1183 version: 0,
1184 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 }
1187 ]
1188 );
1189
1190 // Close notifications are reported only to servers matching the buffer's language.
1191 cx.update(|_| drop(_json_handle));
1192 let close_message = lsp::DidCloseTextDocumentParams {
1193 text_document: lsp::TextDocumentIdentifier::new(
1194 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1195 ),
1196 };
1197 assert_eq!(
1198 fake_json_server
1199 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1200 .await,
1201 close_message,
1202 );
1203}
1204
1205#[gpui::test]
1206async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1207 init_test(cx);
1208
1209 let fs = FakeFs::new(cx.executor());
1210 fs.insert_tree(
1211 path!("/the-root"),
1212 json!({
1213 ".gitignore": "target\n",
1214 "Cargo.lock": "",
1215 "src": {
1216 "a.rs": "",
1217 "b.rs": "",
1218 },
1219 "target": {
1220 "x": {
1221 "out": {
1222 "x.rs": ""
1223 }
1224 },
1225 "y": {
1226 "out": {
1227 "y.rs": "",
1228 }
1229 },
1230 "z": {
1231 "out": {
1232 "z.rs": ""
1233 }
1234 }
1235 }
1236 }),
1237 )
1238 .await;
1239 fs.insert_tree(
1240 path!("/the-registry"),
1241 json!({
1242 "dep1": {
1243 "src": {
1244 "dep1.rs": "",
1245 }
1246 },
1247 "dep2": {
1248 "src": {
1249 "dep2.rs": "",
1250 }
1251 },
1252 }),
1253 )
1254 .await;
1255 fs.insert_tree(
1256 path!("/the/stdlib"),
1257 json!({
1258 "LICENSE": "",
1259 "src": {
1260 "string.rs": "",
1261 }
1262 }),
1263 )
1264 .await;
1265
1266 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1267 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1268 (project.languages().clone(), project.lsp_store())
1269 });
1270 language_registry.add(rust_lang());
1271 let mut fake_servers = language_registry.register_fake_lsp(
1272 "Rust",
1273 FakeLspAdapter {
1274 name: "the-language-server",
1275 ..Default::default()
1276 },
1277 );
1278
1279 cx.executor().run_until_parked();
1280
1281 // Start the language server by opening a buffer with a compatible file extension.
1282 project
1283 .update(cx, |project, cx| {
1284 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1285 })
1286 .await
1287 .unwrap();
1288
1289 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1290 project.update(cx, |project, cx| {
1291 let worktree = project.worktrees(cx).next().unwrap();
1292 assert_eq!(
1293 worktree
1294 .read(cx)
1295 .snapshot()
1296 .entries(true, 0)
1297 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1298 .collect::<Vec<_>>(),
1299 &[
1300 (Path::new(""), false),
1301 (Path::new(".gitignore"), false),
1302 (Path::new("Cargo.lock"), false),
1303 (Path::new("src"), false),
1304 (Path::new("src/a.rs"), false),
1305 (Path::new("src/b.rs"), false),
1306 (Path::new("target"), true),
1307 ]
1308 );
1309 });
1310
1311 let prev_read_dir_count = fs.read_dir_call_count();
1312
1313 let fake_server = fake_servers.next().await.unwrap();
1314 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1315 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1316 id
1317 });
1318
1319 // Simulate jumping to a definition in a dependency outside of the worktree.
1320 let _out_of_worktree_buffer = project
1321 .update(cx, |project, cx| {
1322 project.open_local_buffer_via_lsp(
1323 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1324 server_id,
1325 cx,
1326 )
1327 })
1328 .await
1329 .unwrap();
1330
1331 // Keep track of the FS events reported to the language server.
1332 let file_changes = Arc::new(Mutex::new(Vec::new()));
1333 fake_server
1334 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1335 registrations: vec![lsp::Registration {
1336 id: Default::default(),
1337 method: "workspace/didChangeWatchedFiles".to_string(),
1338 register_options: serde_json::to_value(
1339 lsp::DidChangeWatchedFilesRegistrationOptions {
1340 watchers: vec![
1341 lsp::FileSystemWatcher {
1342 glob_pattern: lsp::GlobPattern::String(
1343 path!("/the-root/Cargo.toml").to_string(),
1344 ),
1345 kind: None,
1346 },
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/src/*.{rs,c}").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/target/y/**/*.rs").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the/stdlib/src/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("**/Cargo.lock").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 ],
1372 },
1373 )
1374 .ok(),
1375 }],
1376 })
1377 .await
1378 .into_response()
1379 .unwrap();
1380 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1381 let file_changes = file_changes.clone();
1382 move |params, _| {
1383 let mut file_changes = file_changes.lock();
1384 file_changes.extend(params.changes);
1385 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1386 }
1387 });
1388
1389 cx.executor().run_until_parked();
1390 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1391 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1392
1393 let mut new_watched_paths = fs.watched_paths();
1394 new_watched_paths.retain(|path| {
1395 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1396 });
1397 assert_eq!(
1398 &new_watched_paths,
1399 &[
1400 Path::new(path!("/the-root")),
1401 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1402 Path::new(path!("/the/stdlib/src"))
1403 ]
1404 );
1405
1406 // Now the language server has asked us to watch an ignored directory path,
1407 // so we recursively load it.
1408 project.update(cx, |project, cx| {
1409 let worktree = project.visible_worktrees(cx).next().unwrap();
1410 assert_eq!(
1411 worktree
1412 .read(cx)
1413 .snapshot()
1414 .entries(true, 0)
1415 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1416 .collect::<Vec<_>>(),
1417 &[
1418 (Path::new(""), false),
1419 (Path::new(".gitignore"), false),
1420 (Path::new("Cargo.lock"), false),
1421 (Path::new("src"), false),
1422 (Path::new("src/a.rs"), false),
1423 (Path::new("src/b.rs"), false),
1424 (Path::new("target"), true),
1425 (Path::new("target/x"), true),
1426 (Path::new("target/y"), true),
1427 (Path::new("target/y/out"), true),
1428 (Path::new("target/y/out/y.rs"), true),
1429 (Path::new("target/z"), true),
1430 ]
1431 );
1432 });
1433
1434 // Perform some file system mutations, two of which match the watched patterns,
1435 // and one of which does not.
1436 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1437 .await
1438 .unwrap();
1439 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1440 .await
1441 .unwrap();
1442 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1443 .await
1444 .unwrap();
1445 fs.create_file(
1446 path!("/the-root/target/x/out/x2.rs").as_ref(),
1447 Default::default(),
1448 )
1449 .await
1450 .unwrap();
1451 fs.create_file(
1452 path!("/the-root/target/y/out/y2.rs").as_ref(),
1453 Default::default(),
1454 )
1455 .await
1456 .unwrap();
1457 fs.save(
1458 path!("/the-root/Cargo.lock").as_ref(),
1459 &"".into(),
1460 Default::default(),
1461 )
1462 .await
1463 .unwrap();
1464 fs.save(
1465 path!("/the-stdlib/LICENSE").as_ref(),
1466 &"".into(),
1467 Default::default(),
1468 )
1469 .await
1470 .unwrap();
1471 fs.save(
1472 path!("/the/stdlib/src/string.rs").as_ref(),
1473 &"".into(),
1474 Default::default(),
1475 )
1476 .await
1477 .unwrap();
1478
1479 // The language server receives events for the FS mutations that match its watch patterns.
1480 cx.executor().run_until_parked();
1481 assert_eq!(
1482 &*file_changes.lock(),
1483 &[
1484 lsp::FileEvent {
1485 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1486 typ: lsp::FileChangeType::CHANGED,
1487 },
1488 lsp::FileEvent {
1489 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1490 typ: lsp::FileChangeType::DELETED,
1491 },
1492 lsp::FileEvent {
1493 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1494 typ: lsp::FileChangeType::CREATED,
1495 },
1496 lsp::FileEvent {
1497 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1498 typ: lsp::FileChangeType::CREATED,
1499 },
1500 lsp::FileEvent {
1501 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1502 typ: lsp::FileChangeType::CHANGED,
1503 },
1504 ]
1505 );
1506}
1507
1508#[gpui::test]
1509async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1510 init_test(cx);
1511
1512 let fs = FakeFs::new(cx.executor());
1513 fs.insert_tree(
1514 path!("/dir"),
1515 json!({
1516 "a.rs": "let a = 1;",
1517 "b.rs": "let b = 2;"
1518 }),
1519 )
1520 .await;
1521
1522 let project = Project::test(
1523 fs,
1524 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1525 cx,
1526 )
1527 .await;
1528 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1529
1530 let buffer_a = project
1531 .update(cx, |project, cx| {
1532 project.open_local_buffer(path!("/dir/a.rs"), cx)
1533 })
1534 .await
1535 .unwrap();
1536 let buffer_b = project
1537 .update(cx, |project, cx| {
1538 project.open_local_buffer(path!("/dir/b.rs"), cx)
1539 })
1540 .await
1541 .unwrap();
1542
1543 lsp_store.update(cx, |lsp_store, cx| {
1544 lsp_store
1545 .update_diagnostics(
1546 LanguageServerId(0),
1547 lsp::PublishDiagnosticsParams {
1548 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1549 version: None,
1550 diagnostics: vec![lsp::Diagnostic {
1551 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1552 severity: Some(lsp::DiagnosticSeverity::ERROR),
1553 message: "error 1".to_string(),
1554 ..Default::default()
1555 }],
1556 },
1557 None,
1558 DiagnosticSourceKind::Pushed,
1559 &[],
1560 cx,
1561 )
1562 .unwrap();
1563 lsp_store
1564 .update_diagnostics(
1565 LanguageServerId(0),
1566 lsp::PublishDiagnosticsParams {
1567 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1568 version: None,
1569 diagnostics: vec![lsp::Diagnostic {
1570 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1571 severity: Some(DiagnosticSeverity::WARNING),
1572 message: "error 2".to_string(),
1573 ..Default::default()
1574 }],
1575 },
1576 None,
1577 DiagnosticSourceKind::Pushed,
1578 &[],
1579 cx,
1580 )
1581 .unwrap();
1582 });
1583
1584 buffer_a.update(cx, |buffer, _| {
1585 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1586 assert_eq!(
1587 chunks
1588 .iter()
1589 .map(|(s, d)| (s.as_str(), *d))
1590 .collect::<Vec<_>>(),
1591 &[
1592 ("let ", None),
1593 ("a", Some(DiagnosticSeverity::ERROR)),
1594 (" = 1;", None),
1595 ]
1596 );
1597 });
1598 buffer_b.update(cx, |buffer, _| {
1599 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1600 assert_eq!(
1601 chunks
1602 .iter()
1603 .map(|(s, d)| (s.as_str(), *d))
1604 .collect::<Vec<_>>(),
1605 &[
1606 ("let ", None),
1607 ("b", Some(DiagnosticSeverity::WARNING)),
1608 (" = 2;", None),
1609 ]
1610 );
1611 });
1612}
1613
1614#[gpui::test]
1615async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1616 init_test(cx);
1617
1618 let fs = FakeFs::new(cx.executor());
1619 fs.insert_tree(
1620 path!("/root"),
1621 json!({
1622 "dir": {
1623 ".git": {
1624 "HEAD": "ref: refs/heads/main",
1625 },
1626 ".gitignore": "b.rs",
1627 "a.rs": "let a = 1;",
1628 "b.rs": "let b = 2;",
1629 },
1630 "other.rs": "let b = c;"
1631 }),
1632 )
1633 .await;
1634
1635 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1636 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1637 let (worktree, _) = project
1638 .update(cx, |project, cx| {
1639 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1640 })
1641 .await
1642 .unwrap();
1643 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1644
1645 let (worktree, _) = project
1646 .update(cx, |project, cx| {
1647 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1648 })
1649 .await
1650 .unwrap();
1651 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1652
1653 let server_id = LanguageServerId(0);
1654 lsp_store.update(cx, |lsp_store, cx| {
1655 lsp_store
1656 .update_diagnostics(
1657 server_id,
1658 lsp::PublishDiagnosticsParams {
1659 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1660 version: None,
1661 diagnostics: vec![lsp::Diagnostic {
1662 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1663 severity: Some(lsp::DiagnosticSeverity::ERROR),
1664 message: "unused variable 'b'".to_string(),
1665 ..Default::default()
1666 }],
1667 },
1668 None,
1669 DiagnosticSourceKind::Pushed,
1670 &[],
1671 cx,
1672 )
1673 .unwrap();
1674 lsp_store
1675 .update_diagnostics(
1676 server_id,
1677 lsp::PublishDiagnosticsParams {
1678 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1679 version: None,
1680 diagnostics: vec![lsp::Diagnostic {
1681 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1682 severity: Some(lsp::DiagnosticSeverity::ERROR),
1683 message: "unknown variable 'c'".to_string(),
1684 ..Default::default()
1685 }],
1686 },
1687 None,
1688 DiagnosticSourceKind::Pushed,
1689 &[],
1690 cx,
1691 )
1692 .unwrap();
1693 });
1694
1695 let main_ignored_buffer = project
1696 .update(cx, |project, cx| {
1697 project.open_buffer((main_worktree_id, "b.rs"), cx)
1698 })
1699 .await
1700 .unwrap();
1701 main_ignored_buffer.update(cx, |buffer, _| {
1702 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1703 assert_eq!(
1704 chunks
1705 .iter()
1706 .map(|(s, d)| (s.as_str(), *d))
1707 .collect::<Vec<_>>(),
1708 &[
1709 ("let ", None),
1710 ("b", Some(DiagnosticSeverity::ERROR)),
1711 (" = 2;", None),
1712 ],
1713 "Gigitnored buffers should still get in-buffer diagnostics",
1714 );
1715 });
1716 let other_buffer = project
1717 .update(cx, |project, cx| {
1718 project.open_buffer((other_worktree_id, ""), cx)
1719 })
1720 .await
1721 .unwrap();
1722 other_buffer.update(cx, |buffer, _| {
1723 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1724 assert_eq!(
1725 chunks
1726 .iter()
1727 .map(|(s, d)| (s.as_str(), *d))
1728 .collect::<Vec<_>>(),
1729 &[
1730 ("let b = ", None),
1731 ("c", Some(DiagnosticSeverity::ERROR)),
1732 (";", None),
1733 ],
1734 "Buffers from hidden projects should still get in-buffer diagnostics"
1735 );
1736 });
1737
1738 project.update(cx, |project, cx| {
1739 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1740 assert_eq!(
1741 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1742 vec![(
1743 ProjectPath {
1744 worktree_id: main_worktree_id,
1745 path: Arc::from(Path::new("b.rs")),
1746 },
1747 server_id,
1748 DiagnosticSummary {
1749 error_count: 1,
1750 warning_count: 0,
1751 }
1752 )]
1753 );
1754 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1755 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1756 });
1757}
1758
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The fake adapter advertises this token as its disk-based-diagnostics
    // progress token; LSP progress on it should be surfaced as the project's
    // DiskBasedDiagnosticsStarted/Finished events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The assertions below check the project's event stream in exact order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token (with a "/0" suffix to mimic
    // real servers) emits the startup RefreshInlayHints event followed by
    // DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published mid-progress produce a per-path update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending progress on the token completes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Diagnostics published before the buffer was opened are applied on open.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Second identical (empty -> empty) publish: no further event is expected.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1895
// Verifies that restarting a language server mid-way through a disk-based
// diagnostics pass does not leave the project stuck in the "diagnosing" state.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a fresh id (1); the old server (0) is removed
    // first, and the buffer is re-registered with the new server.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1996
1997#[gpui::test]
1998async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1999 init_test(cx);
2000
2001 let fs = FakeFs::new(cx.executor());
2002 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2003
2004 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2005
2006 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2007 language_registry.add(rust_lang());
2008 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2009
2010 let (buffer, _) = project
2011 .update(cx, |project, cx| {
2012 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2013 })
2014 .await
2015 .unwrap();
2016
2017 // Publish diagnostics
2018 let fake_server = fake_servers.next().await.unwrap();
2019 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2020 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2021 version: None,
2022 diagnostics: vec![lsp::Diagnostic {
2023 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2024 severity: Some(lsp::DiagnosticSeverity::ERROR),
2025 message: "the message".to_string(),
2026 ..Default::default()
2027 }],
2028 });
2029
2030 cx.executor().run_until_parked();
2031 buffer.update(cx, |buffer, _| {
2032 assert_eq!(
2033 buffer
2034 .snapshot()
2035 .diagnostics_in_range::<_, usize>(0..1, false)
2036 .map(|entry| entry.diagnostic.message)
2037 .collect::<Vec<_>>(),
2038 ["the message".to_string()]
2039 );
2040 });
2041 project.update(cx, |project, cx| {
2042 assert_eq!(
2043 project.diagnostic_summary(false, cx),
2044 DiagnosticSummary {
2045 error_count: 1,
2046 warning_count: 0,
2047 }
2048 );
2049 });
2050
2051 project.update(cx, |project, cx| {
2052 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2053 });
2054
2055 // The diagnostics are cleared.
2056 cx.executor().run_until_parked();
2057 buffer.update(cx, |buffer, _| {
2058 assert_eq!(
2059 buffer
2060 .snapshot()
2061 .diagnostics_in_range::<_, usize>(0..1, false)
2062 .map(|entry| entry.diagnostic.message)
2063 .collect::<Vec<_>>(),
2064 Vec::<String>::new(),
2065 );
2066 });
2067 project.update(cx, |project, cx| {
2068 assert_eq!(
2069 project.diagnostic_summary(false, cx),
2070 DiagnosticSummary {
2071 error_count: 0,
2072 warning_count: 0,
2073 }
2074 );
2075 });
2076}
2077
2078#[gpui::test]
2079async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2080 init_test(cx);
2081
2082 let fs = FakeFs::new(cx.executor());
2083 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2084
2085 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2086 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2087
2088 language_registry.add(rust_lang());
2089 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2090
2091 let (buffer, _handle) = project
2092 .update(cx, |project, cx| {
2093 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2094 })
2095 .await
2096 .unwrap();
2097
2098 // Before restarting the server, report diagnostics with an unknown buffer version.
2099 let fake_server = fake_servers.next().await.unwrap();
2100 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2101 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2102 version: Some(10000),
2103 diagnostics: Vec::new(),
2104 });
2105 cx.executor().run_until_parked();
2106 project.update(cx, |project, cx| {
2107 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2108 });
2109
2110 let mut fake_server = fake_servers.next().await.unwrap();
2111 let notification = fake_server
2112 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2113 .await
2114 .text_document;
2115 assert_eq!(notification.version, 0);
2116}
2117
2118#[gpui::test]
2119async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2120 init_test(cx);
2121
2122 let progress_token = "the-progress-token";
2123
2124 let fs = FakeFs::new(cx.executor());
2125 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2126
2127 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2128
2129 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2130 language_registry.add(rust_lang());
2131 let mut fake_servers = language_registry.register_fake_lsp(
2132 "Rust",
2133 FakeLspAdapter {
2134 name: "the-language-server",
2135 disk_based_diagnostics_sources: vec!["disk".into()],
2136 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2137 ..Default::default()
2138 },
2139 );
2140
2141 let (buffer, _handle) = project
2142 .update(cx, |project, cx| {
2143 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2144 })
2145 .await
2146 .unwrap();
2147
2148 // Simulate diagnostics starting to update.
2149 let mut fake_server = fake_servers.next().await.unwrap();
2150 fake_server
2151 .start_progress_with(
2152 "another-token",
2153 lsp::WorkDoneProgressBegin {
2154 cancellable: Some(false),
2155 ..Default::default()
2156 },
2157 )
2158 .await;
2159 fake_server
2160 .start_progress_with(
2161 progress_token,
2162 lsp::WorkDoneProgressBegin {
2163 cancellable: Some(true),
2164 ..Default::default()
2165 },
2166 )
2167 .await;
2168 cx.executor().run_until_parked();
2169
2170 project.update(cx, |project, cx| {
2171 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2172 });
2173
2174 let cancel_notification = fake_server
2175 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2176 .await;
2177 assert_eq!(
2178 cancel_notification.token,
2179 NumberOrString::String(progress_token.into())
2180 );
2181}
2182
// Verifies that toggling `enable_language_server` per language starts and
// stops only the servers for that language, leaving other languages untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A stopped server receives an Exit notification.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a fresh instance that reopens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2300
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an *older* document version
    // are transformed through the buffer edits made since that version, that
    // overlapping diagnostics highlight correctly, and that disk-based
    // diagnostics keep tracking unsaved edits.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // "disk" diagnostics are treated as disk-based (is_disk_based).
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider (warning) range sorts first; the nested error follows.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2592
2593#[gpui::test]
2594async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2595 init_test(cx);
2596
2597 let text = concat!(
2598 "let one = ;\n", //
2599 "let two = \n",
2600 "let three = 3;\n",
2601 );
2602
2603 let fs = FakeFs::new(cx.executor());
2604 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2605
2606 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2607 let buffer = project
2608 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2609 .await
2610 .unwrap();
2611
2612 project.update(cx, |project, cx| {
2613 project.lsp_store.update(cx, |lsp_store, cx| {
2614 lsp_store
2615 .update_diagnostic_entries(
2616 LanguageServerId(0),
2617 PathBuf::from("/dir/a.rs"),
2618 None,
2619 None,
2620 vec![
2621 DiagnosticEntry {
2622 range: Unclipped(PointUtf16::new(0, 10))
2623 ..Unclipped(PointUtf16::new(0, 10)),
2624 diagnostic: Diagnostic {
2625 severity: DiagnosticSeverity::ERROR,
2626 message: "syntax error 1".to_string(),
2627 source_kind: DiagnosticSourceKind::Pushed,
2628 ..Diagnostic::default()
2629 },
2630 },
2631 DiagnosticEntry {
2632 range: Unclipped(PointUtf16::new(1, 10))
2633 ..Unclipped(PointUtf16::new(1, 10)),
2634 diagnostic: Diagnostic {
2635 severity: DiagnosticSeverity::ERROR,
2636 message: "syntax error 2".to_string(),
2637 source_kind: DiagnosticSourceKind::Pushed,
2638 ..Diagnostic::default()
2639 },
2640 },
2641 ],
2642 cx,
2643 )
2644 .unwrap();
2645 })
2646 });
2647
2648 // An empty range is extended forward to include the following character.
2649 // At the end of a line, an empty range is extended backward to include
2650 // the preceding character.
2651 buffer.update(cx, |buffer, _| {
2652 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2653 assert_eq!(
2654 chunks
2655 .iter()
2656 .map(|(s, d)| (s.as_str(), *d))
2657 .collect::<Vec<_>>(),
2658 &[
2659 ("let one = ", None),
2660 (";", Some(DiagnosticSeverity::ERROR)),
2661 ("\nlet two =", None),
2662 (" ", Some(DiagnosticSeverity::ERROR)),
2663 ("\nlet three = 3;\n", None)
2664 ]
2665 );
2666 });
2667}
2668
2669#[gpui::test]
2670async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2671 init_test(cx);
2672
2673 let fs = FakeFs::new(cx.executor());
2674 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2675 .await;
2676
2677 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2678 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2679
2680 lsp_store.update(cx, |lsp_store, cx| {
2681 lsp_store
2682 .update_diagnostic_entries(
2683 LanguageServerId(0),
2684 Path::new("/dir/a.rs").to_owned(),
2685 None,
2686 None,
2687 vec![DiagnosticEntry {
2688 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2689 diagnostic: Diagnostic {
2690 severity: DiagnosticSeverity::ERROR,
2691 is_primary: true,
2692 message: "syntax error a1".to_string(),
2693 source_kind: DiagnosticSourceKind::Pushed,
2694 ..Diagnostic::default()
2695 },
2696 }],
2697 cx,
2698 )
2699 .unwrap();
2700 lsp_store
2701 .update_diagnostic_entries(
2702 LanguageServerId(1),
2703 Path::new("/dir/a.rs").to_owned(),
2704 None,
2705 None,
2706 vec![DiagnosticEntry {
2707 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2708 diagnostic: Diagnostic {
2709 severity: DiagnosticSeverity::ERROR,
2710 is_primary: true,
2711 message: "syntax error b1".to_string(),
2712 source_kind: DiagnosticSourceKind::Pushed,
2713 ..Diagnostic::default()
2714 },
2715 }],
2716 cx,
2717 )
2718 .unwrap();
2719
2720 assert_eq!(
2721 lsp_store.diagnostic_summary(false, cx),
2722 DiagnosticSummary {
2723 error_count: 2,
2724 warning_count: 0,
2725 }
2726 );
2727 });
2728}
2729
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP text edits computed against a *past* document version
    // are correctly transformed through the buffer edits made since that
    // version before being applied.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the version the server saw at open time; edits below will be
    // tagged with this stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below refer to the *original* (pre-edit) document;
    // `edits_from_lsp` must rebase them onto the current buffer contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2884
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a large replace-everything style diff from the server is
    // collapsed into the minimal set of buffer edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only two minimal edits remain: the import rewrite and the removal
        // of the now-duplicated `use a::c;` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2995
2996#[gpui::test]
2997async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2998 cx: &mut gpui::TestAppContext,
2999) {
3000 init_test(cx);
3001
3002 let text = "Path()";
3003
3004 let fs = FakeFs::new(cx.executor());
3005 fs.insert_tree(
3006 path!("/dir"),
3007 json!({
3008 "a.rs": text
3009 }),
3010 )
3011 .await;
3012
3013 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3014 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3015 let buffer = project
3016 .update(cx, |project, cx| {
3017 project.open_local_buffer(path!("/dir/a.rs"), cx)
3018 })
3019 .await
3020 .unwrap();
3021
3022 // Simulate the language server sending us a pair of edits at the same location,
3023 // with an insertion following a replacement (which violates the LSP spec).
3024 let edits = lsp_store
3025 .update(cx, |lsp_store, cx| {
3026 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3027 &buffer,
3028 [
3029 lsp::TextEdit {
3030 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3031 new_text: "Path".into(),
3032 },
3033 lsp::TextEdit {
3034 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3035 new_text: "from path import Path\n\n\n".into(),
3036 },
3037 ],
3038 LanguageServerId(0),
3039 None,
3040 cx,
3041 )
3042 })
3043 .await
3044 .unwrap();
3045
3046 buffer.update(cx, |buffer, cx| {
3047 buffer.edit(edits, None, cx);
3048 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3049 });
3050}
3051
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that out-of-order, inverted, and out-of-bounds edit ranges from
    // a language server are sanitized into a valid, minimal edit set.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the end of the file; must be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result collapses to the same two
        // minimal edits as a well-formed diff would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3158
3159fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3160 buffer: &Buffer,
3161 range: Range<T>,
3162) -> Vec<(String, Option<DiagnosticSeverity>)> {
3163 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3164 for chunk in buffer.snapshot().chunks(range, true) {
3165 if chunks
3166 .last()
3167 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3168 {
3169 chunks.last_mut().unwrap().0.push_str(chunk.text);
3170 } else {
3171 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3172 }
3173 }
3174 chunks
3175}
3176
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target file outside the
    // visible worktree is opened via an invisible worktree that is released
    // when the definition handle is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project's (single-file) worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at `A` inside a.rs, a file outside the project's worktree.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now backs an extra, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path alongside its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3275
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that a completion item's `text_edit` takes precedence over its
    // `insert_text` and `label` when resolving the replacement text and range.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request first; the handler is installed below, before the
    // future is awaited.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3358
// Verifies how completion items that have no `text_edit` of their own are
// resolved against the completion list's `itemDefaults.edit_range`:
// `insert_text` is used as the new text when present, otherwise the `label`
// is the fallback. In both cases the replace range must come from the
// list-level default edit range.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the completion request before installing the handler; the
        // handler's `.next().await` below services exactly one request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn" token
                        // (the last 3 characters before the cursor).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text takes precedence over the label; the replace range
        // comes from the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is the fallback
        // new text; the replace range still comes from the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3494
// Verifies completion resolution when the server supplies neither a per-item
// `text_edit` nor a list-level default edit range: the editor must derive the
// replace range itself from the text around the cursor, and must fall back
// from `insert_text` to `label` for the inserted text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request before installing the handler; `.next().await` below
    // services exactly one pending request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The derived range covers the 3 characters before the cursor ("fqn").
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no insert_text either, the label is inserted.
    assert_eq!(completions[0].new_text, "component");
    // The derived range covers the 3 characters before the cursor ("cmp").
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3600
3601#[gpui::test]
3602async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3603 init_test(cx);
3604
3605 let fs = FakeFs::new(cx.executor());
3606 fs.insert_tree(
3607 path!("/dir"),
3608 json!({
3609 "a.ts": "",
3610 }),
3611 )
3612 .await;
3613
3614 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3615
3616 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3617 language_registry.add(typescript_lang());
3618 let mut fake_language_servers = language_registry.register_fake_lsp(
3619 "TypeScript",
3620 FakeLspAdapter {
3621 capabilities: lsp::ServerCapabilities {
3622 completion_provider: Some(lsp::CompletionOptions {
3623 trigger_characters: Some(vec![":".to_string()]),
3624 ..Default::default()
3625 }),
3626 ..Default::default()
3627 },
3628 ..Default::default()
3629 },
3630 );
3631
3632 let (buffer, _handle) = project
3633 .update(cx, |p, cx| {
3634 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3635 })
3636 .await
3637 .unwrap();
3638
3639 let fake_server = fake_language_servers.next().await.unwrap();
3640
3641 let text = "let a = b.fqn";
3642 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3643 let completions = project.update(cx, |project, cx| {
3644 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3645 });
3646
3647 fake_server
3648 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3649 Ok(Some(lsp::CompletionResponse::Array(vec![
3650 lsp::CompletionItem {
3651 label: "fullyQualifiedName?".into(),
3652 insert_text: Some("fully\rQualified\r\nName".into()),
3653 ..Default::default()
3654 },
3655 ])))
3656 })
3657 .next()
3658 .await;
3659 let completions = completions
3660 .await
3661 .unwrap()
3662 .into_iter()
3663 .flat_map(|response| response.completions)
3664 .collect::<Vec<_>>();
3665 assert_eq!(completions.len(), 1);
3666 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3667}
3668
// End-to-end test of command-backed code actions: the server returns an
// action with no edits, resolution attaches only a command, and executing
// that command makes the server push edits back via `workspace/applyEdit`.
// Those edits must surface in the project transaction returned by
// `apply_code_action`, and must be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server advertising resolvable code actions and a
    // single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` payload is round-tripped to the resolve
                    // handler below, which keys off its presence.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"); this drives the
    // resolve -> executeCommand -> applyEdit sequence below.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3810
3811#[gpui::test(iterations = 10)]
3812async fn test_save_file(cx: &mut gpui::TestAppContext) {
3813 init_test(cx);
3814
3815 let fs = FakeFs::new(cx.executor());
3816 fs.insert_tree(
3817 path!("/dir"),
3818 json!({
3819 "file1": "the old contents",
3820 }),
3821 )
3822 .await;
3823
3824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3825 let buffer = project
3826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3827 .await
3828 .unwrap();
3829 buffer.update(cx, |buffer, cx| {
3830 assert_eq!(buffer.text(), "the old contents");
3831 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3832 });
3833
3834 project
3835 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3836 .await
3837 .unwrap();
3838
3839 let new_text = fs
3840 .load(Path::new(path!("/dir/file1")))
3841 .await
3842 .unwrap()
3843 .replace("\r\n", "\n");
3844 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3845}
3846
// Regression test: an untitled buffer saved under a path whose extension maps
// to a registered language must cause that language's server to start and to
// receive a didOpen for the newly saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server; the stream below yields it once started.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path, so no server can be associated yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving the buffer as a .rs file gives it a Rust-matching path.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3926
3927#[gpui::test(iterations = 30)]
3928async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3929 init_test(cx);
3930
3931 let fs = FakeFs::new(cx.executor());
3932 fs.insert_tree(
3933 path!("/dir"),
3934 json!({
3935 "file1": "the original contents",
3936 }),
3937 )
3938 .await;
3939
3940 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3941 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3942 let buffer = project
3943 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3944 .await
3945 .unwrap();
3946
3947 // Simulate buffer diffs being slow, so that they don't complete before
3948 // the next file change occurs.
3949 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3950
3951 // Change the buffer's file on disk, and then wait for the file change
3952 // to be detected by the worktree, so that the buffer starts reloading.
3953 fs.save(
3954 path!("/dir/file1").as_ref(),
3955 &"the first contents".into(),
3956 Default::default(),
3957 )
3958 .await
3959 .unwrap();
3960 worktree.next_event(cx).await;
3961
3962 // Change the buffer's file again. Depending on the random seed, the
3963 // previous file change may still be in progress.
3964 fs.save(
3965 path!("/dir/file1").as_ref(),
3966 &"the second contents".into(),
3967 Default::default(),
3968 )
3969 .await
3970 .unwrap();
3971 worktree.next_event(cx).await;
3972
3973 cx.executor().run_until_parked();
3974 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3975 buffer.read_with(cx, |buffer, _| {
3976 assert_eq!(buffer.text(), on_disk_text);
3977 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3978 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3979 });
3980}
3981
3982#[gpui::test(iterations = 30)]
3983async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3984 init_test(cx);
3985
3986 let fs = FakeFs::new(cx.executor());
3987 fs.insert_tree(
3988 path!("/dir"),
3989 json!({
3990 "file1": "the original contents",
3991 }),
3992 )
3993 .await;
3994
3995 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3996 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3997 let buffer = project
3998 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3999 .await
4000 .unwrap();
4001
4002 // Simulate buffer diffs being slow, so that they don't complete before
4003 // the next file change occurs.
4004 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4005
4006 // Change the buffer's file on disk, and then wait for the file change
4007 // to be detected by the worktree, so that the buffer starts reloading.
4008 fs.save(
4009 path!("/dir/file1").as_ref(),
4010 &"the first contents".into(),
4011 Default::default(),
4012 )
4013 .await
4014 .unwrap();
4015 worktree.next_event(cx).await;
4016
4017 cx.executor()
4018 .spawn(cx.executor().simulate_random_delay())
4019 .await;
4020
4021 // Perform a noop edit, causing the buffer's version to increase.
4022 buffer.update(cx, |buffer, cx| {
4023 buffer.edit([(0..0, " ")], None, cx);
4024 buffer.undo(cx);
4025 });
4026
4027 cx.executor().run_until_parked();
4028 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4029 buffer.read_with(cx, |buffer, _| {
4030 let buffer_text = buffer.text();
4031 if buffer_text == on_disk_text {
4032 assert!(
4033 !buffer.is_dirty() && !buffer.has_conflict(),
4034 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4035 );
4036 }
4037 // If the file change occurred while the buffer was processing the first
4038 // change, the buffer will be in a conflicting state.
4039 else {
4040 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4041 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4042 }
4043 });
4044}
4045
4046#[gpui::test]
4047async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4048 init_test(cx);
4049
4050 let fs = FakeFs::new(cx.executor());
4051 fs.insert_tree(
4052 path!("/dir"),
4053 json!({
4054 "file1": "the old contents",
4055 }),
4056 )
4057 .await;
4058
4059 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4060 let buffer = project
4061 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4062 .await
4063 .unwrap();
4064 buffer.update(cx, |buffer, cx| {
4065 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4066 });
4067
4068 project
4069 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4070 .await
4071 .unwrap();
4072
4073 let new_text = fs
4074 .load(Path::new(path!("/dir/file1")))
4075 .await
4076 .unwrap()
4077 .replace("\r\n", "\n");
4078 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4079}
4080
4081#[gpui::test]
4082async fn test_save_as(cx: &mut gpui::TestAppContext) {
4083 init_test(cx);
4084
4085 let fs = FakeFs::new(cx.executor());
4086 fs.insert_tree("/dir", json!({})).await;
4087
4088 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4089
4090 let languages = project.update(cx, |project, _| project.languages().clone());
4091 languages.add(rust_lang());
4092
4093 let buffer = project.update(cx, |project, cx| {
4094 project.create_local_buffer("", None, false, cx)
4095 });
4096 buffer.update(cx, |buffer, cx| {
4097 buffer.edit([(0..0, "abc")], None, cx);
4098 assert!(buffer.is_dirty());
4099 assert!(!buffer.has_conflict());
4100 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4101 });
4102 project
4103 .update(cx, |project, cx| {
4104 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4105 let path = ProjectPath {
4106 worktree_id,
4107 path: Arc::from(Path::new("file1.rs")),
4108 };
4109 project.save_buffer_as(buffer.clone(), path, cx)
4110 })
4111 .await
4112 .unwrap();
4113 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4114
4115 cx.executor().run_until_parked();
4116 buffer.update(cx, |buffer, cx| {
4117 assert_eq!(
4118 buffer.file().unwrap().full_path(cx),
4119 Path::new("dir/file1.rs")
4120 );
4121 assert!(!buffer.is_dirty());
4122 assert!(!buffer.has_conflict());
4123 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4124 });
4125
4126 let opened_buffer = project
4127 .update(cx, |project, cx| {
4128 project.open_local_buffer("/dir/file1.rs", cx)
4129 })
4130 .await
4131 .unwrap();
4132 assert_eq!(opened_buffer, buffer);
4133}
4134
// Uses the real filesystem to rename/delete files and directories while
// buffers are open, checking that (a) project entry ids and buffer paths
// track the moves, (b) deleted files are reported as such, and (c) a remote
// replica of the worktree converges to the same state via streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS test: filesystem watching needs actual blocking.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable project-entry id for a worktree path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree streams out, to replay into
    // the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames/deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames, even through a parent-dir move.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4300
4301#[gpui::test(iterations = 10)]
4302async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4303 init_test(cx);
4304
4305 let fs = FakeFs::new(cx.executor());
4306 fs.insert_tree(
4307 path!("/dir"),
4308 json!({
4309 "a": {
4310 "file1": "",
4311 }
4312 }),
4313 )
4314 .await;
4315
4316 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4317 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4318 let tree_id = tree.update(cx, |tree, _| tree.id());
4319
4320 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4321 project.update(cx, |project, cx| {
4322 let tree = project.worktrees(cx).next().unwrap();
4323 tree.read(cx)
4324 .entry_for_path(path)
4325 .unwrap_or_else(|| panic!("no entry for path {}", path))
4326 .id
4327 })
4328 };
4329
4330 let dir_id = id_for_path("a", cx);
4331 let file_id = id_for_path("a/file1", cx);
4332 let buffer = project
4333 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4334 .await
4335 .unwrap();
4336 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4337
4338 project
4339 .update(cx, |project, cx| {
4340 project.rename_entry(dir_id, Path::new("b"), cx)
4341 })
4342 .unwrap()
4343 .await
4344 .into_included()
4345 .unwrap();
4346 cx.executor().run_until_parked();
4347
4348 assert_eq!(id_for_path("b", cx), dir_id);
4349 assert_eq!(id_for_path("b/file1", cx), file_id);
4350 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4351}
4352
4353#[gpui::test]
4354async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4355 init_test(cx);
4356
4357 let fs = FakeFs::new(cx.executor());
4358 fs.insert_tree(
4359 "/dir",
4360 json!({
4361 "a.txt": "a-contents",
4362 "b.txt": "b-contents",
4363 }),
4364 )
4365 .await;
4366
4367 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4368
4369 // Spawn multiple tasks to open paths, repeating some paths.
4370 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4371 (
4372 p.open_local_buffer("/dir/a.txt", cx),
4373 p.open_local_buffer("/dir/b.txt", cx),
4374 p.open_local_buffer("/dir/a.txt", cx),
4375 )
4376 });
4377
4378 let buffer_a_1 = buffer_a_1.await.unwrap();
4379 let buffer_a_2 = buffer_a_2.await.unwrap();
4380 let buffer_b = buffer_b.await.unwrap();
4381 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4382 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4383
4384 // There is only one buffer per path.
4385 let buffer_a_id = buffer_a_1.entity_id();
4386 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4387
4388 // Open the same path again while it is still open.
4389 drop(buffer_a_1);
4390 let buffer_a_3 = project
4391 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4392 .await
4393 .unwrap();
4394
4395 // There's still only one buffer per path.
4396 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4397}
4398
4399#[gpui::test]
4400async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4401 init_test(cx);
4402
4403 let fs = FakeFs::new(cx.executor());
4404 fs.insert_tree(
4405 path!("/dir"),
4406 json!({
4407 "file1": "abc",
4408 "file2": "def",
4409 "file3": "ghi",
4410 }),
4411 )
4412 .await;
4413
4414 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4415
4416 let buffer1 = project
4417 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4418 .await
4419 .unwrap();
4420 let events = Arc::new(Mutex::new(Vec::new()));
4421
4422 // initially, the buffer isn't dirty.
4423 buffer1.update(cx, |buffer, cx| {
4424 cx.subscribe(&buffer1, {
4425 let events = events.clone();
4426 move |_, _, event, _| match event {
4427 BufferEvent::Operation { .. } => {}
4428 _ => events.lock().push(event.clone()),
4429 }
4430 })
4431 .detach();
4432
4433 assert!(!buffer.is_dirty());
4434 assert!(events.lock().is_empty());
4435
4436 buffer.edit([(1..2, "")], None, cx);
4437 });
4438
4439 // after the first edit, the buffer is dirty, and emits a dirtied event.
4440 buffer1.update(cx, |buffer, cx| {
4441 assert!(buffer.text() == "ac");
4442 assert!(buffer.is_dirty());
4443 assert_eq!(
4444 *events.lock(),
4445 &[
4446 language::BufferEvent::Edited,
4447 language::BufferEvent::DirtyChanged
4448 ]
4449 );
4450 events.lock().clear();
4451 buffer.did_save(
4452 buffer.version(),
4453 buffer.file().unwrap().disk_state().mtime(),
4454 cx,
4455 );
4456 });
4457
4458 // after saving, the buffer is not dirty, and emits a saved event.
4459 buffer1.update(cx, |buffer, cx| {
4460 assert!(!buffer.is_dirty());
4461 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4462 events.lock().clear();
4463
4464 buffer.edit([(1..1, "B")], None, cx);
4465 buffer.edit([(2..2, "D")], None, cx);
4466 });
4467
4468 // after editing again, the buffer is dirty, and emits another dirty event.
4469 buffer1.update(cx, |buffer, cx| {
4470 assert!(buffer.text() == "aBDc");
4471 assert!(buffer.is_dirty());
4472 assert_eq!(
4473 *events.lock(),
4474 &[
4475 language::BufferEvent::Edited,
4476 language::BufferEvent::DirtyChanged,
4477 language::BufferEvent::Edited,
4478 ],
4479 );
4480 events.lock().clear();
4481
4482 // After restoring the buffer to its previously-saved state,
4483 // the buffer is not considered dirty anymore.
4484 buffer.edit([(1..3, "")], None, cx);
4485 assert!(buffer.text() == "ac");
4486 assert!(!buffer.is_dirty());
4487 });
4488
4489 assert_eq!(
4490 *events.lock(),
4491 &[
4492 language::BufferEvent::Edited,
4493 language::BufferEvent::DirtyChanged
4494 ]
4495 );
4496
4497 // When a file is deleted, it is not considered dirty.
4498 let events = Arc::new(Mutex::new(Vec::new()));
4499 let buffer2 = project
4500 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4501 .await
4502 .unwrap();
4503 buffer2.update(cx, |_, cx| {
4504 cx.subscribe(&buffer2, {
4505 let events = events.clone();
4506 move |_, _, event, _| match event {
4507 BufferEvent::Operation { .. } => {}
4508 _ => events.lock().push(event.clone()),
4509 }
4510 })
4511 .detach();
4512 });
4513
4514 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4515 .await
4516 .unwrap();
4517 cx.executor().run_until_parked();
4518 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4519 assert_eq!(
4520 mem::take(&mut *events.lock()),
4521 &[language::BufferEvent::FileHandleChanged]
4522 );
4523
4524 // Buffer becomes dirty when edited.
4525 buffer2.update(cx, |buffer, cx| {
4526 buffer.edit([(2..3, "")], None, cx);
4527 assert_eq!(buffer.is_dirty(), true);
4528 });
4529 assert_eq!(
4530 mem::take(&mut *events.lock()),
4531 &[
4532 language::BufferEvent::Edited,
4533 language::BufferEvent::DirtyChanged
4534 ]
4535 );
4536
4537 // Buffer becomes clean again when all of its content is removed, because
4538 // the file was deleted.
4539 buffer2.update(cx, |buffer, cx| {
4540 buffer.edit([(0..2, "")], None, cx);
4541 assert_eq!(buffer.is_empty(), true);
4542 assert_eq!(buffer.is_dirty(), false);
4543 });
4544 assert_eq!(
4545 *events.lock(),
4546 &[
4547 language::BufferEvent::Edited,
4548 language::BufferEvent::DirtyChanged
4549 ]
4550 );
4551
4552 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4553 let events = Arc::new(Mutex::new(Vec::new()));
4554 let buffer3 = project
4555 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4556 .await
4557 .unwrap();
4558 buffer3.update(cx, |_, cx| {
4559 cx.subscribe(&buffer3, {
4560 let events = events.clone();
4561 move |_, _, event, _| match event {
4562 BufferEvent::Operation { .. } => {}
4563 _ => events.lock().push(event.clone()),
4564 }
4565 })
4566 .detach();
4567 });
4568
4569 buffer3.update(cx, |buffer, cx| {
4570 buffer.edit([(0..0, "x")], None, cx);
4571 });
4572 events.lock().clear();
4573 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4574 .await
4575 .unwrap();
4576 cx.executor().run_until_parked();
4577 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4578 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4579}
4580
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer is reloaded in place (anchors surviving via a diff of old
    // and new contents), while a dirty buffer keeps its edits and is only
    // flagged as having a conflict.
    init_test(cx);

    // The `ˇ` markers record byte offsets so we can later verify that anchors
    // created at those positions survive the on-disk reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked position in the original text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // New contents are marked at the positions where the old anchors are
    // expected to land after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should have been carried across the reload to the
        // corresponding marked position in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4663
4664#[gpui::test]
4665async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4666 init_test(cx);
4667
4668 let fs = FakeFs::new(cx.executor());
4669 fs.insert_tree(
4670 path!("/dir"),
4671 json!({
4672 "file1": "a\nb\nc\n",
4673 "file2": "one\r\ntwo\r\nthree\r\n",
4674 }),
4675 )
4676 .await;
4677
4678 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4679 let buffer1 = project
4680 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4681 .await
4682 .unwrap();
4683 let buffer2 = project
4684 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4685 .await
4686 .unwrap();
4687
4688 buffer1.update(cx, |buffer, _| {
4689 assert_eq!(buffer.text(), "a\nb\nc\n");
4690 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4691 });
4692 buffer2.update(cx, |buffer, _| {
4693 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4694 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4695 });
4696
4697 // Change a file's line endings on disk from unix to windows. The buffer's
4698 // state updates correctly.
4699 fs.save(
4700 path!("/dir/file1").as_ref(),
4701 &"aaa\nb\nc\n".into(),
4702 LineEnding::Windows,
4703 )
4704 .await
4705 .unwrap();
4706 cx.executor().run_until_parked();
4707 buffer1.update(cx, |buffer, _| {
4708 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4709 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4710 });
4711
4712 // Save a file with windows line endings. The file is written correctly.
4713 buffer2.update(cx, |buffer, cx| {
4714 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4715 });
4716 project
4717 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4718 .await
4719 .unwrap();
4720 assert_eq!(
4721 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4722 "one\r\ntwo\r\nthree\r\nfour\r\n",
4723 );
4724}
4725
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics whose `related_information` entries
    // reference each other are collapsed into groups: each group has one
    // primary diagnostic plus its hint diagnostics, all sharing a `group_id`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Fixture: two primary diagnostics ("error 1" with one hint, "error 2"
    // with two hints). Hints point back at their primary via
    // `related_information`, which is how the grouping below is derived.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, ordered by position. The "error 2" group
    // gets group_id 0 and the "error 1" group gets group_id 1; hints carry the
    // group_id of their primary and have `is_primary: false`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus its two hints, ordered by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4985
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry drives the LSP file-operation
    // protocol: a `workspace/willRenameFiles` request (whose returned
    // WorkspaceEdit the project must honor) followed by a
    // `workspace/didRenameFiles` notification, both carrying the old/new URIs.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in rename operations on `*.rs` files and
    // on any folder, mirroring real servers' `fileOperations` capabilities.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the request handler is installed just below.
    // NOTE(review): this relies on the fake server holding the incoming
    // willRenameFiles request until a handler is registered — confirm against
    // the FakeLanguageServer implementation if this ordering ever changes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; it targets the
    // sibling file two/two.rs, not the file being renamed.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit actually handed back by the handler so the final
    // assertion can prove the request round-tripped.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5114
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename against a fake LSP server: prepare_rename
    // resolves the renameable range, then perform_rename applies a
    // multi-file WorkspaceEdit returned by the server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename to "THREE"; the server returns edits touching both
    // one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction maps each edited buffer to its undo entry;
    // both files must reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5254
5255#[gpui::test]
5256async fn test_search(cx: &mut gpui::TestAppContext) {
5257 init_test(cx);
5258
5259 let fs = FakeFs::new(cx.executor());
5260 fs.insert_tree(
5261 path!("/dir"),
5262 json!({
5263 "one.rs": "const ONE: usize = 1;",
5264 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5265 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5266 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5267 }),
5268 )
5269 .await;
5270 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5271 assert_eq!(
5272 search(
5273 &project,
5274 SearchQuery::text(
5275 "TWO",
5276 false,
5277 true,
5278 false,
5279 Default::default(),
5280 Default::default(),
5281 false,
5282 None
5283 )
5284 .unwrap(),
5285 cx
5286 )
5287 .await
5288 .unwrap(),
5289 HashMap::from_iter([
5290 (path!("dir/two.rs").to_string(), vec![6..9]),
5291 (path!("dir/three.rs").to_string(), vec![37..40])
5292 ])
5293 );
5294
5295 let buffer_4 = project
5296 .update(cx, |project, cx| {
5297 project.open_local_buffer(path!("/dir/four.rs"), cx)
5298 })
5299 .await
5300 .unwrap();
5301 buffer_4.update(cx, |buffer, cx| {
5302 let text = "two::TWO";
5303 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5304 });
5305
5306 assert_eq!(
5307 search(
5308 &project,
5309 SearchQuery::text(
5310 "TWO",
5311 false,
5312 true,
5313 false,
5314 Default::default(),
5315 Default::default(),
5316 false,
5317 None,
5318 )
5319 .unwrap(),
5320 cx
5321 )
5322 .await
5323 .unwrap(),
5324 HashMap::from_iter([
5325 (path!("dir/two.rs").to_string(), vec![6..9]),
5326 (path!("dir/three.rs").to_string(), vec![37..40]),
5327 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5328 ])
5329 );
5330}
5331
5332#[gpui::test]
5333async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5334 init_test(cx);
5335
5336 let search_query = "file";
5337
5338 let fs = FakeFs::new(cx.executor());
5339 fs.insert_tree(
5340 path!("/dir"),
5341 json!({
5342 "one.rs": r#"// Rust file one"#,
5343 "one.ts": r#"// TypeScript file one"#,
5344 "two.rs": r#"// Rust file two"#,
5345 "two.ts": r#"// TypeScript file two"#,
5346 }),
5347 )
5348 .await;
5349 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5350
5351 assert!(
5352 search(
5353 &project,
5354 SearchQuery::text(
5355 search_query,
5356 false,
5357 true,
5358 false,
5359 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5360 Default::default(),
5361 false,
5362 None
5363 )
5364 .unwrap(),
5365 cx
5366 )
5367 .await
5368 .unwrap()
5369 .is_empty(),
5370 "If no inclusions match, no files should be returned"
5371 );
5372
5373 assert_eq!(
5374 search(
5375 &project,
5376 SearchQuery::text(
5377 search_query,
5378 false,
5379 true,
5380 false,
5381 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5382 Default::default(),
5383 false,
5384 None
5385 )
5386 .unwrap(),
5387 cx
5388 )
5389 .await
5390 .unwrap(),
5391 HashMap::from_iter([
5392 (path!("dir/one.rs").to_string(), vec![8..12]),
5393 (path!("dir/two.rs").to_string(), vec![8..12]),
5394 ]),
5395 "Rust only search should give only Rust files"
5396 );
5397
5398 assert_eq!(
5399 search(
5400 &project,
5401 SearchQuery::text(
5402 search_query,
5403 false,
5404 true,
5405 false,
5406 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5407 Default::default(),
5408 false,
5409 None,
5410 )
5411 .unwrap(),
5412 cx
5413 )
5414 .await
5415 .unwrap(),
5416 HashMap::from_iter([
5417 (path!("dir/one.ts").to_string(), vec![14..18]),
5418 (path!("dir/two.ts").to_string(), vec![14..18]),
5419 ]),
5420 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5421 );
5422
5423 assert_eq!(
5424 search(
5425 &project,
5426 SearchQuery::text(
5427 search_query,
5428 false,
5429 true,
5430 false,
5431 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5432 .unwrap(),
5433 Default::default(),
5434 false,
5435 None,
5436 )
5437 .unwrap(),
5438 cx
5439 )
5440 .await
5441 .unwrap(),
5442 HashMap::from_iter([
5443 (path!("dir/two.ts").to_string(), vec![14..18]),
5444 (path!("dir/one.rs").to_string(), vec![8..12]),
5445 (path!("dir/one.ts").to_string(), vec![14..18]),
5446 (path!("dir/two.rs").to_string(), vec![8..12]),
5447 ]),
5448 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5449 );
5450}
5451
5452#[gpui::test]
5453async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5454 init_test(cx);
5455
5456 let search_query = "file";
5457
5458 let fs = FakeFs::new(cx.executor());
5459 fs.insert_tree(
5460 path!("/dir"),
5461 json!({
5462 "one.rs": r#"// Rust file one"#,
5463 "one.ts": r#"// TypeScript file one"#,
5464 "two.rs": r#"// Rust file two"#,
5465 "two.ts": r#"// TypeScript file two"#,
5466 }),
5467 )
5468 .await;
5469 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5470
5471 assert_eq!(
5472 search(
5473 &project,
5474 SearchQuery::text(
5475 search_query,
5476 false,
5477 true,
5478 false,
5479 Default::default(),
5480 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5481 false,
5482 None,
5483 )
5484 .unwrap(),
5485 cx
5486 )
5487 .await
5488 .unwrap(),
5489 HashMap::from_iter([
5490 (path!("dir/one.rs").to_string(), vec![8..12]),
5491 (path!("dir/one.ts").to_string(), vec![14..18]),
5492 (path!("dir/two.rs").to_string(), vec![8..12]),
5493 (path!("dir/two.ts").to_string(), vec![14..18]),
5494 ]),
5495 "If no exclusions match, all files should be returned"
5496 );
5497
5498 assert_eq!(
5499 search(
5500 &project,
5501 SearchQuery::text(
5502 search_query,
5503 false,
5504 true,
5505 false,
5506 Default::default(),
5507 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5508 false,
5509 None,
5510 )
5511 .unwrap(),
5512 cx
5513 )
5514 .await
5515 .unwrap(),
5516 HashMap::from_iter([
5517 (path!("dir/one.ts").to_string(), vec![14..18]),
5518 (path!("dir/two.ts").to_string(), vec![14..18]),
5519 ]),
5520 "Rust exclusion search should give only TypeScript files"
5521 );
5522
5523 assert_eq!(
5524 search(
5525 &project,
5526 SearchQuery::text(
5527 search_query,
5528 false,
5529 true,
5530 false,
5531 Default::default(),
5532 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5533 false,
5534 None,
5535 )
5536 .unwrap(),
5537 cx
5538 )
5539 .await
5540 .unwrap(),
5541 HashMap::from_iter([
5542 (path!("dir/one.rs").to_string(), vec![8..12]),
5543 (path!("dir/two.rs").to_string(), vec![8..12]),
5544 ]),
5545 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5546 );
5547
5548 assert!(
5549 search(
5550 &project,
5551 SearchQuery::text(
5552 search_query,
5553 false,
5554 true,
5555 false,
5556 Default::default(),
5557 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5558 .unwrap(),
5559 false,
5560 None,
5561 )
5562 .unwrap(),
5563 cx
5564 )
5565 .await
5566 .unwrap()
5567 .is_empty(),
5568 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5569 );
5570}
5571
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion scenarios as `test_search_with_exclusions`, but with an
    // extra untitled (path-less) buffer open whose contents also match the
    // query. The expected results never include that buffer, exercising how
    // path exclusions interact with buffers that have no file path.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Untitled buffer whose text ("file") matches the search query.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Even though the untitled buffer matches the query, excluding every
    // on-disk file must yield an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5695
5696#[gpui::test]
5697async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5698 init_test(cx);
5699
5700 let search_query = "file";
5701
5702 let fs = FakeFs::new(cx.executor());
5703 fs.insert_tree(
5704 path!("/dir"),
5705 json!({
5706 "one.rs": r#"// Rust file one"#,
5707 "one.ts": r#"// TypeScript file one"#,
5708 "two.rs": r#"// Rust file two"#,
5709 "two.ts": r#"// TypeScript file two"#,
5710 }),
5711 )
5712 .await;
5713 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5714
5715 assert!(
5716 search(
5717 &project,
5718 SearchQuery::text(
5719 search_query,
5720 false,
5721 true,
5722 false,
5723 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5724 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5725 false,
5726 None,
5727 )
5728 .unwrap(),
5729 cx
5730 )
5731 .await
5732 .unwrap()
5733 .is_empty(),
5734 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5735 );
5736
5737 assert!(
5738 search(
5739 &project,
5740 SearchQuery::text(
5741 search_query,
5742 false,
5743 true,
5744 false,
5745 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5746 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5747 false,
5748 None,
5749 )
5750 .unwrap(),
5751 cx
5752 )
5753 .await
5754 .unwrap()
5755 .is_empty(),
5756 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5757 );
5758
5759 assert!(
5760 search(
5761 &project,
5762 SearchQuery::text(
5763 search_query,
5764 false,
5765 true,
5766 false,
5767 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5768 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5769 false,
5770 None,
5771 )
5772 .unwrap(),
5773 cx
5774 )
5775 .await
5776 .unwrap()
5777 .is_empty(),
5778 "Non-matching inclusions and exclusions should not change that."
5779 );
5780
5781 assert_eq!(
5782 search(
5783 &project,
5784 SearchQuery::text(
5785 search_query,
5786 false,
5787 true,
5788 false,
5789 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5790 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5791 false,
5792 None,
5793 )
5794 .unwrap(),
5795 cx
5796 )
5797 .await
5798 .unwrap(),
5799 HashMap::from_iter([
5800 (path!("dir/one.ts").to_string(), vec![14..18]),
5801 (path!("dir/two.ts").to_string(), vec![14..18]),
5802 ]),
5803 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5804 );
5805}
5806
// Verifies inclusion patterns across multiple worktrees: a worktree-qualified
// pattern ("worktree-a/*.rs") restricts results to that worktree, while a
// bare extension pattern ("*.ts") matches in every worktree.
//
// NOTE(review): the 7th `SearchQuery::text` argument is `true` only for the
// worktree-qualified patterns — presumably it makes patterns match against
// full (worktree-prefixed) paths; confirm against `SearchQuery::text`'s
// signature.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Ranges are byte offsets of "NEEDLE" within each file ("// " is 3 bytes).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unqualified pattern is not limited to a single worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5904
// Verifies the `include_ignored` flag (4th `SearchQuery::text` argument):
// by default gitignored directories ("target", "node_modules") are skipped,
// with `true` they are searched, and inclusion/exclusion patterns still apply
// on top of that.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // include_ignored = false: only the non-ignored root package.json matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project, so the previous scan's state can't influence this case.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // include_ignored = true: every file containing "key" is found, including
    // those under the gitignored "target" and "node_modules" directories.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include_ignored = true combined with inclusions/exclusions: only the
    // prettier subtree is included, and its .ts file is excluded again.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6027
// Verifies text search over multi-byte (Cyrillic) content. All match ranges
// are byte offsets: "привет" is 6 chars × 2 bytes = 12 bytes, so "// привет"
// matches at 3..15. Also pins an implementation detail asserted below:
// a case-sensitive plain-text query stays `SearchQuery::Text`, while a
// case-insensitive one is compiled into `SearchQuery::Regex`.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive: only lowercase occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: uppercase "ПРИВЕТ" occurrences match too.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Literal '.' in the query must not act as a regex wildcard even though the
    // case-insensitive query goes through the regex path: only "ПРИВЕТ." matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6111
6112#[gpui::test]
6113async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6114 init_test(cx);
6115
6116 let fs = FakeFs::new(cx.executor());
6117 fs.insert_tree(
6118 "/one/two",
6119 json!({
6120 "three": {
6121 "a.txt": "",
6122 "four": {}
6123 },
6124 "c.rs": ""
6125 }),
6126 )
6127 .await;
6128
6129 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6130 project
6131 .update(cx, |project, cx| {
6132 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6133 project.create_entry((id, "b.."), true, cx)
6134 })
6135 .await
6136 .unwrap()
6137 .into_included()
6138 .unwrap();
6139
6140 // Can't create paths outside the project
6141 let result = project
6142 .update(cx, |project, cx| {
6143 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6144 project.create_entry((id, "../../boop"), true, cx)
6145 })
6146 .await;
6147 assert!(result.is_err());
6148
6149 // Can't create paths with '..'
6150 let result = project
6151 .update(cx, |project, cx| {
6152 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6153 project.create_entry((id, "four/../beep"), true, cx)
6154 })
6155 .await;
6156 assert!(result.is_err());
6157
6158 assert_eq!(
6159 fs.paths(true),
6160 vec![
6161 PathBuf::from(path!("/")),
6162 PathBuf::from(path!("/one")),
6163 PathBuf::from(path!("/one/two")),
6164 PathBuf::from(path!("/one/two/c.rs")),
6165 PathBuf::from(path!("/one/two/three")),
6166 PathBuf::from(path!("/one/two/three/a.txt")),
6167 PathBuf::from(path!("/one/two/three/b..")),
6168 PathBuf::from(path!("/one/two/three/four")),
6169 ]
6170 );
6171
6172 // And we cannot open buffers with '..'
6173 let result = project
6174 .update(cx, |project, cx| {
6175 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6176 project.open_buffer((id, "../c.rs"), cx)
6177 })
6178 .await;
6179 assert!(result.is_err())
6180}
6181
// Verifies hover aggregation across multiple language servers attached to the
// same buffer: servers advertising hover capability are all queried, a server
// returning `Ok(None)` contributes nothing, and a server WITHOUT hover
// capability is never asked at all (its handler panics if called).
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx"; only the last one lacks hover capability.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a hover naming themselves, so the final
            // assertion can tell the responses apart.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Queried, but contributes no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried: it advertised no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover, then wait until each capable server has actually
    // received its request before awaiting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6336
// Verifies that hover content consisting only of empty/whitespace-only parts
// is filtered out entirely: the server answers with "", " ", and "\n\n\n",
// and the project-level hover result must be empty.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Every hover part is blank in some way; none should survive filtering.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Start the hover first, then make sure the server saw the request before
    // asserting on the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6410
// Verifies that `Project::code_actions` filters by requested `CodeActionKind`:
// the server offers both SOURCE_ORGANIZE_IMPORTS and SOURCE_FIX_ALL actions,
// but only the requested organize-imports kind is returned.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only the organize-imports kind over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the requested kind survives the filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6489
6490#[gpui::test]
6491async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6492 init_test(cx);
6493
6494 let fs = FakeFs::new(cx.executor());
6495 fs.insert_tree(
6496 path!("/dir"),
6497 json!({
6498 "a.tsx": "a",
6499 }),
6500 )
6501 .await;
6502
6503 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6504
6505 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6506 language_registry.add(tsx_lang());
6507 let language_server_names = [
6508 "TypeScriptServer",
6509 "TailwindServer",
6510 "ESLintServer",
6511 "NoActionsCapabilitiesServer",
6512 ];
6513
6514 let mut language_server_rxs = [
6515 language_registry.register_fake_lsp(
6516 "tsx",
6517 FakeLspAdapter {
6518 name: language_server_names[0],
6519 capabilities: lsp::ServerCapabilities {
6520 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6521 ..lsp::ServerCapabilities::default()
6522 },
6523 ..FakeLspAdapter::default()
6524 },
6525 ),
6526 language_registry.register_fake_lsp(
6527 "tsx",
6528 FakeLspAdapter {
6529 name: language_server_names[1],
6530 capabilities: lsp::ServerCapabilities {
6531 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6532 ..lsp::ServerCapabilities::default()
6533 },
6534 ..FakeLspAdapter::default()
6535 },
6536 ),
6537 language_registry.register_fake_lsp(
6538 "tsx",
6539 FakeLspAdapter {
6540 name: language_server_names[2],
6541 capabilities: lsp::ServerCapabilities {
6542 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6543 ..lsp::ServerCapabilities::default()
6544 },
6545 ..FakeLspAdapter::default()
6546 },
6547 ),
6548 language_registry.register_fake_lsp(
6549 "tsx",
6550 FakeLspAdapter {
6551 name: language_server_names[3],
6552 capabilities: lsp::ServerCapabilities {
6553 code_action_provider: None,
6554 ..lsp::ServerCapabilities::default()
6555 },
6556 ..FakeLspAdapter::default()
6557 },
6558 ),
6559 ];
6560
6561 let (buffer, _handle) = project
6562 .update(cx, |p, cx| {
6563 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6564 })
6565 .await
6566 .unwrap();
6567 cx.executor().run_until_parked();
6568
6569 let mut servers_with_actions_requests = HashMap::default();
6570 for i in 0..language_server_names.len() {
6571 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6572 panic!(
6573 "Failed to get language server #{i} with name {}",
6574 &language_server_names[i]
6575 )
6576 });
6577 let new_server_name = new_server.server.name();
6578
6579 assert!(
6580 !servers_with_actions_requests.contains_key(&new_server_name),
6581 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6582 );
6583 match new_server_name.0.as_ref() {
6584 "TailwindServer" | "TypeScriptServer" => {
6585 servers_with_actions_requests.insert(
6586 new_server_name.clone(),
6587 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6588 move |_, _| {
6589 let name = new_server_name.clone();
6590 async move {
6591 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6592 lsp::CodeAction {
6593 title: format!("{name} code action"),
6594 ..lsp::CodeAction::default()
6595 },
6596 )]))
6597 }
6598 },
6599 ),
6600 );
6601 }
6602 "ESLintServer" => {
6603 servers_with_actions_requests.insert(
6604 new_server_name,
6605 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6606 |_, _| async move { Ok(None) },
6607 ),
6608 );
6609 }
6610 "NoActionsCapabilitiesServer" => {
6611 let _never_handled = new_server
6612 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6613 panic!(
6614 "Should not call for code actions server with no corresponding capabilities"
6615 )
6616 });
6617 }
6618 unexpected => panic!("Unexpected server name: {unexpected}"),
6619 }
6620 }
6621
6622 let code_actions_task = project.update(cx, |project, cx| {
6623 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6624 });
6625
6626 // cx.run_until_parked();
6627 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6628 |mut code_actions_request| async move {
6629 code_actions_request
6630 .next()
6631 .await
6632 .expect("All code actions requests should have been triggered")
6633 },
6634 ))
6635 .await;
6636 assert_eq!(
6637 vec!["TailwindServer code action", "TypeScriptServer code action"],
6638 code_actions_task
6639 .await
6640 .unwrap()
6641 .unwrap()
6642 .into_iter()
6643 .map(|code_action| code_action.lsp_action.title().to_owned())
6644 .sorted()
6645 .collect::<Vec<_>>(),
6646 "Should receive code actions responses from all related servers with hover capabilities"
6647 );
6648}
6649
// Verifies `Project::move_worktree` over a project with three single-file
// worktrees: every combination of moving a worktree before/after another
// (adjacent and non-adjacent) results in the expected `visible_worktrees`
// order. The `worktree_a/b/c` handles always refer to the same worktrees;
// only their positions change.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is its own (visible) worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third (non-adjacent move)
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first (non-adjacent move back)
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
6847
// Verifies that an unstaged diff (working copy vs. index) for a buffer reports
// the correct hunks, and that the diff refreshes when the index contents are
// rewritten on disk.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line and one modified line vs. the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the initial diff calculation settle before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so that only the println! line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff should pick up the new index contents and shrink to one hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6945
// Verifies that an uncommitted diff (working copy vs. HEAD) reports hunks with
// the correct secondary (staged/unstaged) status, updates when HEAD changes,
// and handles files that exist in HEAD but are deleted in the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println! change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should be syntax-highlighted with the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The added comment is unstaged (has a secondary hunk); the println!
        // change is already in the index, so it has no secondary hunk.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged: it still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (remove it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has a secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7125
// Verifies the full staging lifecycle of diff hunks: optimistic "pending"
// status while an index write is in flight, transition to staged once the
// write lands, rollback to unstaged when the index write fails, and the
// events the diff emits at each step.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on the emitted sequence.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the hunk is "pending".
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The failure hasn't surfaced yet, so the hunk optimistically shows
        // as pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7465
// Verifies that staging additional hunks while earlier index-write FS events
// are still buffered does not lose or clobber pending state: once all events
// flush, every staged hunk must settle as staged.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staging operations are pending now.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7659
// Randomized test: repeatedly stage/unstage random hunks with random delays
// (and optionally a deprioritized diff-recalculation task, to induce races
// between diff recalculation and index writes), then verify that every hunk
// settles into the status implied by the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected per-hunk status.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle: stage if currently unstaged, unstage otherwise, recording
        // the expected optimistic (pending) status on our model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to interleave with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending status must have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7778
7779#[gpui::test]
7780async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7781 init_test(cx);
7782
7783 let committed_contents = r#"
7784 fn main() {
7785 println!("hello from HEAD");
7786 }
7787 "#
7788 .unindent();
7789 let file_contents = r#"
7790 fn main() {
7791 println!("hello from the working copy");
7792 }
7793 "#
7794 .unindent();
7795
7796 let fs = FakeFs::new(cx.background_executor.clone());
7797 fs.insert_tree(
7798 "/dir",
7799 json!({
7800 ".git": {},
7801 "src": {
7802 "main.rs": file_contents,
7803 }
7804 }),
7805 )
7806 .await;
7807
7808 fs.set_head_for_repo(
7809 Path::new("/dir/.git"),
7810 &[("src/main.rs".into(), committed_contents.clone())],
7811 "deadbeef",
7812 );
7813 fs.set_index_for_repo(
7814 Path::new("/dir/.git"),
7815 &[("src/main.rs".into(), committed_contents.clone())],
7816 );
7817
7818 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7819
7820 let buffer = project
7821 .update(cx, |project, cx| {
7822 project.open_local_buffer("/dir/src/main.rs", cx)
7823 })
7824 .await
7825 .unwrap();
7826 let uncommitted_diff = project
7827 .update(cx, |project, cx| {
7828 project.open_uncommitted_diff(buffer.clone(), cx)
7829 })
7830 .await
7831 .unwrap();
7832
7833 cx.run_until_parked();
7834 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7835 let snapshot = buffer.read(cx).snapshot();
7836 assert_hunks(
7837 uncommitted_diff.hunks(&snapshot, cx),
7838 &snapshot,
7839 &uncommitted_diff.base_text_string().unwrap(),
7840 &[(
7841 1..2,
7842 " println!(\"hello from HEAD\");\n",
7843 " println!(\"hello from the working copy\");\n",
7844 DiffHunkStatus {
7845 kind: DiffHunkStatusKind::Modified,
7846 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7847 },
7848 )],
7849 );
7850 });
7851}
7852
// Verifies that project paths are mapped to the innermost containing git
// repository (including nested repositories), and that the mapping disappears
// when a repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repo; `dir1/deps/dep1` is a nested repo; `c.txt` is in neither.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's `.git` should drop the mapping for its files.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7942
7943#[gpui::test]
7944async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7945 init_test(cx);
7946 let fs = FakeFs::new(cx.background_executor.clone());
7947 let home = paths::home_dir();
7948 fs.insert_tree(
7949 home,
7950 json!({
7951 ".git": {},
7952 "project": {
7953 "a.txt": "A"
7954 },
7955 }),
7956 )
7957 .await;
7958
7959 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
7960 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7961 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7962
7963 project
7964 .update(cx, |project, cx| project.git_scans_complete(cx))
7965 .await;
7966 tree.flush_fs_events(cx).await;
7967
7968 project.read_with(cx, |project, cx| {
7969 let containing = project
7970 .git_store()
7971 .read(cx)
7972 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7973 assert!(containing.is_none());
7974 });
7975
7976 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
7977 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7978 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7979 project
7980 .update(cx, |project, cx| project.git_scans_complete(cx))
7981 .await;
7982 tree.flush_fs_events(cx).await;
7983
7984 project.read_with(cx, |project, cx| {
7985 let containing = project
7986 .git_store()
7987 .read(cx)
7988 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7989 assert_eq!(
7990 containing
7991 .unwrap()
7992 .0
7993 .read(cx)
7994 .work_directory_abs_path
7995 .as_ref(),
7996 home,
7997 );
7998 });
7999}
8000
// Verifies (against a real git repository on the real filesystem) that the
// cached repository status tracks modifications, additions, deletions, and
// commits as the working copy and index change.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so parking must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the states annotated above: delete d.txt, modify a.txt.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // c.txt should now also appear as modified.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit everything outstanding, then delete one tracked and one
    // untracked file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8130
// Verifies two pieces of status post-processing: a file that is deleted in the
// index but present in the working copy is reported with a combined
// index=Deleted / worktree=Added ("DA") status, and a nested git repository is
// excluded from the outer repository's status list.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses a real filesystem and real git, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    // Wait for fs events and the initial git scan to settle before asserting.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8193
// Opening a worktree rooted in a deep subfolder of a repository should still
// discover the repository at its true root, and statuses for paths inside the
// worktree's subtree should be visible (keyed by repo-relative paths).
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths for the two files under the opened subtree.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt starts out with a (untracked) status.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // The project root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the repo root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the simulated status should propagate after the next scan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8270
// Checks that merge conflicts produced by a conflicted cherry-pick are surfaced
// in `Repository::merge_conflicts`, and cleared again once the cherry-pick is
// resolved (staged, committed, and CHERRY_PICK_HEAD removed).
//
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())] // NOTE: `cfg(any())` is always false, so this test is currently compiled out.
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git: blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch that edits a.txt...
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // ...then edit the same file differently on main and cherry-pick the
    // divergent commit, producing a conflict.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself sees an in-progress, conflicted cherry-pick.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository entity.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8353
// Changing .gitignore at runtime should flip which entries are ignored, and a
// newly un-ignored file that gets staged should show up as Added.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain .gitignore and a.xml; b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, and b.txt is staged (Added) and not ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8421
// Renaming a repository's work directory on disk should update the tracked
// `work_directory_abs_path` while preserving the file statuses within it.
//
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git: blocking is expected.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, `a` modified, `b` untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename and keep the same statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8502
// End-to-end exercise of file-status tracking against a real git repository:
// initial statuses, worktree edits, commits, reset/stash/index operations,
// .gitignore handling, and directory creation/renames.
//
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git: blocking is expected.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed away; b.txt was un-staged.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new file in a brand-new nested directory should be untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status over to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8704
// Repositories should only be discovered for visible worktrees: adding an
// invisible (single-file) worktree must not pull in the repository that
// contains it.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repository (dep1) as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only dep1's repository is known; the outer dir1 repo is not.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file inside the outer repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: invisible worktrees add no repos.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8766
// Exercises ignore handling across rescans: files ignored by an ancestor
// .gitignore (outside the repo), files ignored by the repo's own .gitignore,
// and newly created files that get staged.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so all entries (including ignored ones)
    // are observable in the worktree.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so their ignored
    // state can be asserted below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Clean tracked file: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // The ancestor .gitignore lives outside the repo, so the file is not
        // ignored from the repository's point of view.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by the repo's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    // Also create files matching the ancestor ignore and the repo ignore.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Staged but not committed: shows as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8902
// Linked git worktrees (`.git` file pointing at `.git/worktrees/...`) and
// submodules (`.git` file pointing at `.git/modules/...`) should each be
// discovered as separate repositories, and changes to their git state should
// refresh statuses for buffers inside them.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A linked worktree: `.git` is a file pointing into the main
                // repo's `.git/worktrees` directory.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // A submodule: `.git` is a file pointing into the main
                    // repo's `.git/modules` directory.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both say "b", while the file on disk says "B",
            // so src/b.txt becomes modified in the worktree.
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer resolves to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures pending repository updates have been processed
        // before we assert on statuses.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9052
// Two project worktrees living inside the same git repository should be
// deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories from the same repository as separate
    // worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository entry exists, rooted at the shared repo root.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9099
9100async fn search(
9101 project: &Entity<Project>,
9102 query: SearchQuery,
9103 cx: &mut gpui::TestAppContext,
9104) -> Result<HashMap<String, Vec<Range<usize>>>> {
9105 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9106 let mut results = HashMap::default();
9107 while let Ok(search_result) = search_rx.recv().await {
9108 match search_result {
9109 SearchResult::Buffer { buffer, ranges } => {
9110 results.entry(buffer).or_insert(ranges);
9111 }
9112 SearchResult::LimitReached => {}
9113 }
9114 }
9115 Ok(results
9116 .into_iter()
9117 .map(|(buffer, ranges)| {
9118 buffer.update(cx, |buffer, cx| {
9119 let path = buffer
9120 .file()
9121 .unwrap()
9122 .full_path(cx)
9123 .to_string_lossy()
9124 .to_string();
9125 let ranges = ranges
9126 .into_iter()
9127 .map(|range| range.to_offset(buffer))
9128 .collect::<Vec<_>>();
9129 (path, ranges)
9130 })
9131 })
9132 .collect())
9133}
9134
/// Common per-test setup: logging plus the global state that `Project` tests
/// rely on. The settings store must be installed as a global before the other
/// `init` calls, since they register settings against it.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9146
9147fn json_lang() -> Arc<Language> {
9148 Arc::new(Language::new(
9149 LanguageConfig {
9150 name: "JSON".into(),
9151 matcher: LanguageMatcher {
9152 path_suffixes: vec!["json".to_string()],
9153 ..Default::default()
9154 },
9155 ..Default::default()
9156 },
9157 None,
9158 ))
9159}
9160
9161fn js_lang() -> Arc<Language> {
9162 Arc::new(Language::new(
9163 LanguageConfig {
9164 name: "JavaScript".into(),
9165 matcher: LanguageMatcher {
9166 path_suffixes: vec!["js".to_string()],
9167 ..Default::default()
9168 },
9169 ..Default::default()
9170 },
9171 None,
9172 ))
9173}
9174
9175fn rust_lang() -> Arc<Language> {
9176 Arc::new(Language::new(
9177 LanguageConfig {
9178 name: "Rust".into(),
9179 matcher: LanguageMatcher {
9180 path_suffixes: vec!["rs".to_string()],
9181 ..Default::default()
9182 },
9183 ..Default::default()
9184 },
9185 Some(tree_sitter_rust::LANGUAGE.into()),
9186 ))
9187}
9188
/// Builds a Python test language whose toolchain lister reports a fake
/// "Python Venv" toolchain for every `.venv` directory found in the ancestors
/// of the queried path (as seen through the provided `FakeFs`).
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // A stub ToolchainLister that scans the FakeFs instead of running any
    // real Python tooling.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<Path>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path
                .ancestors()
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // One toolchain per ancestor directory that contains `.venv`.
                let venv_path = worktree_root.join(ancestor).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in this stub; callers get an error.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9259
9260fn typescript_lang() -> Arc<Language> {
9261 Arc::new(Language::new(
9262 LanguageConfig {
9263 name: "TypeScript".into(),
9264 matcher: LanguageMatcher {
9265 path_suffixes: vec!["ts".to_string()],
9266 ..Default::default()
9267 },
9268 ..Default::default()
9269 },
9270 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9271 ))
9272}
9273
9274fn tsx_lang() -> Arc<Language> {
9275 Arc::new(Language::new(
9276 LanguageConfig {
9277 name: "tsx".into(),
9278 matcher: LanguageMatcher {
9279 path_suffixes: vec!["tsx".to_string()],
9280 ..Default::default()
9281 },
9282 ..Default::default()
9283 },
9284 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9285 ))
9286}
9287
9288fn get_all_tasks(
9289 project: &Entity<Project>,
9290 task_contexts: Arc<TaskContexts>,
9291 cx: &mut App,
9292) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9293 let new_tasks = project.update(cx, |project, cx| {
9294 project.task_store.update(cx, |task_store, cx| {
9295 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9296 this.used_and_current_resolved_tasks(task_contexts, cx)
9297 })
9298 })
9299 });
9300
9301 cx.background_spawn(async move {
9302 let (mut old, new) = new_tasks.await;
9303 old.extend(new);
9304 old
9305 })
9306}
9307
9308#[track_caller]
9309fn assert_entry_git_state(
9310 tree: &Worktree,
9311 repository: &Repository,
9312 path: &str,
9313 index_status: Option<StatusCode>,
9314 is_ignored: bool,
9315) {
9316 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9317 let entry = tree
9318 .entry_for_path(path)
9319 .unwrap_or_else(|| panic!("entry {path} not found"));
9320 let status = repository
9321 .status_for_path(&path.into())
9322 .map(|entry| entry.status);
9323 let expected = index_status.map(|index_status| {
9324 TrackedStatus {
9325 index_status,
9326 worktree_status: StatusCode::Unmodified,
9327 }
9328 .into()
9329 });
9330 assert_eq!(
9331 status, expected,
9332 "expected {path} to have git status: {expected:?}"
9333 );
9334 assert_eq!(
9335 entry.is_ignored, is_ignored,
9336 "expected {path} to have is_ignored: {is_ignored}"
9337 );
9338}
9339
9340#[track_caller]
9341fn git_init(path: &Path) -> git2::Repository {
9342 let mut init_opts = RepositoryInitOptions::new();
9343 init_opts.initial_head("main");
9344 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9345}
9346
9347#[track_caller]
9348fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9349 let path = path.as_ref();
9350 let mut index = repo.index().expect("Failed to get index");
9351 index.add_path(path).expect("Failed to add file");
9352 index.write().expect("Failed to write index");
9353}
9354
9355#[track_caller]
9356fn git_remove_index(path: &Path, repo: &git2::Repository) {
9357 let mut index = repo.index().expect("Failed to get index");
9358 index.remove_path(path).expect("Failed to add file");
9359 index.write().expect("Failed to write index");
9360}
9361
9362#[track_caller]
9363fn git_commit(msg: &'static str, repo: &git2::Repository) {
9364 use git2::Signature;
9365
9366 let signature = Signature::now("test", "test@zed.dev").unwrap();
9367 let oid = repo.index().unwrap().write_tree().unwrap();
9368 let tree = repo.find_tree(oid).unwrap();
9369 if let Ok(head) = repo.head() {
9370 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9371
9372 let parent_commit = parent_obj.as_commit().unwrap();
9373
9374 repo.commit(
9375 Some("HEAD"),
9376 &signature,
9377 &signature,
9378 msg,
9379 &tree,
9380 &[parent_commit],
9381 )
9382 .expect("Failed to commit with parent");
9383 } else {
9384 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9385 .expect("Failed to commit");
9386 }
9387}
9388
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is compiled
// out; presumably kept so a test can re-enable it without rewriting it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Applies `commit` onto the working tree and index, like `git cherry-pick`.
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9394
9395#[track_caller]
9396fn git_stash(repo: &mut git2::Repository) {
9397 use git2::Signature;
9398
9399 let signature = Signature::now("test", "test@zed.dev").unwrap();
9400 repo.stash_save(&signature, "N/A", None)
9401 .expect("Failed to stash");
9402}
9403
9404#[track_caller]
9405fn git_reset(offset: usize, repo: &git2::Repository) {
9406 let head = repo.head().expect("Couldn't get repo head");
9407 let object = head.peel(git2::ObjectType::Commit).unwrap();
9408 let commit = object.as_commit().unwrap();
9409 let new_head = commit
9410 .parents()
9411 .inspect(|parnet| {
9412 parnet.message();
9413 })
9414 .nth(offset)
9415 .expect("Not enough history");
9416 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9417 .expect("Could not reset");
9418}
9419
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is compiled
// out; presumably kept so a test can re-enable it without rewriting it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Creates branch `name` pointing at the current HEAD commit (no force).
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false).expect("Failed to commit");
}
9430
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is compiled
// out; presumably kept so a test can re-enable it without rewriting it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Points HEAD at `name` (a refname), then checks out that revision.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9437
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is compiled
// out; presumably kept so a test can re-enable it without rewriting it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    // Snapshots the repository's per-path status flags into a map.
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9447
9448#[gpui::test]
9449async fn test_find_project_path_abs(
9450 background_executor: BackgroundExecutor,
9451 cx: &mut gpui::TestAppContext,
9452) {
9453 // find_project_path should work with absolute paths
9454 init_test(cx);
9455
9456 let fs = FakeFs::new(background_executor);
9457 fs.insert_tree(
9458 path!("/root"),
9459 json!({
9460 "project1": {
9461 "file1.txt": "content1",
9462 "subdir": {
9463 "file2.txt": "content2"
9464 }
9465 },
9466 "project2": {
9467 "file3.txt": "content3"
9468 }
9469 }),
9470 )
9471 .await;
9472
9473 let project = Project::test(
9474 fs.clone(),
9475 [
9476 path!("/root/project1").as_ref(),
9477 path!("/root/project2").as_ref(),
9478 ],
9479 cx,
9480 )
9481 .await;
9482
9483 // Make sure the worktrees are fully initialized
9484 project
9485 .update(cx, |project, cx| project.git_scans_complete(cx))
9486 .await;
9487 cx.run_until_parked();
9488
9489 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
9490 project.read_with(cx, |project, cx| {
9491 let worktrees: Vec<_> = project.worktrees(cx).collect();
9492 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
9493 let id1 = worktrees[0].read(cx).id();
9494 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
9495 let id2 = worktrees[1].read(cx).id();
9496 (abs_path1, id1, abs_path2, id2)
9497 });
9498
9499 project.update(cx, |project, cx| {
9500 let abs_path = project1_abs_path.join("file1.txt");
9501 let found_path = project.find_project_path(abs_path, cx).unwrap();
9502 assert_eq!(found_path.worktree_id, project1_id);
9503 assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));
9504
9505 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
9506 let found_path = project.find_project_path(abs_path, cx).unwrap();
9507 assert_eq!(found_path.worktree_id, project1_id);
9508 assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));
9509
9510 let abs_path = project2_abs_path.join("file3.txt");
9511 let found_path = project.find_project_path(abs_path, cx).unwrap();
9512 assert_eq!(found_path.worktree_id, project2_id);
9513 assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));
9514
9515 let abs_path = project1_abs_path.join("nonexistent.txt");
9516 let found_path = project.find_project_path(abs_path, cx);
9517 assert!(
9518 found_path.is_some(),
9519 "Should find project path for nonexistent file in worktree"
9520 );
9521
9522 // Test with an absolute path outside any worktree
9523 let abs_path = Path::new("/some/other/path");
9524 let found_path = project.find_project_path(abs_path, cx);
9525 assert!(
9526 found_path.is_none(),
9527 "Should not find project path for path outside any worktree"
9528 );
9529 });
9530}