1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::RepoPath,
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
25 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainLister,
26 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
27 tree_sitter_rust, tree_sitter_typescript,
28};
29use lsp::{
30 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
31 Uri, WillRenameFiles, notification::DidRenameFiles,
32};
33use parking_lot::Mutex;
34use paths::{config_dir, tasks_file};
35use postage::stream::Stream as _;
36use pretty_assertions::{assert_eq, assert_matches};
37use rand::{Rng as _, rngs::StdRng};
38use serde_json::json;
39#[cfg(not(windows))]
40use std::os;
41use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
42use task::{ResolvedTask, ShellKind, TaskContext};
43use unindent::Unindent as _;
44use util::{
45 TryFutureExt as _, assert_set_eq, maybe, path,
46 paths::PathMatcher,
47 test::{TempTree, marked_text_offsets},
48 uri,
49};
50use worktree::WorktreeModelHandle as _;
51
52#[gpui::test]
53async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
54 cx.executor().allow_parking();
55
56 let (tx, mut rx) = futures::channel::mpsc::unbounded();
57 let _thread = std::thread::spawn(move || {
58 #[cfg(not(target_os = "windows"))]
59 std::fs::metadata("/tmp").unwrap();
60 #[cfg(target_os = "windows")]
61 std::fs::metadata("C:/Windows").unwrap();
62 std::thread::sleep(Duration::from_millis(1000));
63 tx.unbounded_send(1).unwrap();
64 });
65 rx.next().await.unwrap();
66}
67
68#[gpui::test]
69async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
70 cx.executor().allow_parking();
71
72 let io_task = smol::unblock(move || {
73 println!("sleeping on thread {:?}", std::thread::current().id());
74 std::thread::sleep(Duration::from_millis(10));
75 1
76 });
77
78 let task = cx.foreground_executor().spawn(async move {
79 io_task.await;
80 });
81
82 task.await;
83}
84
85#[cfg(not(windows))]
86#[gpui::test]
87async fn test_symlinks(cx: &mut gpui::TestAppContext) {
88 init_test(cx);
89 cx.executor().allow_parking();
90
91 let dir = TempTree::new(json!({
92 "root": {
93 "apple": "",
94 "banana": {
95 "carrot": {
96 "date": "",
97 "endive": "",
98 }
99 },
100 "fennel": {
101 "grape": "",
102 }
103 }
104 }));
105
106 let root_link_path = dir.path().join("root_link");
107 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
108 os::unix::fs::symlink(
109 dir.path().join("root/fennel"),
110 dir.path().join("root/finnochio"),
111 )
112 .unwrap();
113
114 let project = Project::test(
115 Arc::new(RealFs::new(None, cx.executor())),
116 [root_link_path.as_ref()],
117 cx,
118 )
119 .await;
120
121 project.update(cx, |project, cx| {
122 let tree = project.worktrees(cx).next().unwrap().read(cx);
123 assert_eq!(tree.file_count(), 5);
124 assert_eq!(
125 tree.inode_for_path("fennel/grape"),
126 tree.inode_for_path("finnochio/grape")
127 );
128 });
129}
130
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root `.editorconfig` plus a nested one in `b/`, alongside
    // Zed's own `.zed/settings.json`. The assertions below verify precedence:
    // nested .editorconfig > root .editorconfig > .zed settings.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk temp tree into the FakeFs the project reads from.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // going through language detection so per-language globs apply.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
229
230#[gpui::test]
231async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
232 init_test(cx);
233 cx.update(|cx| {
234 GitHostingProviderRegistry::default_global(cx);
235 git_hosting_providers::init(cx);
236 });
237
238 let fs = FakeFs::new(cx.executor());
239 let str_path = path!("/dir");
240 let path = Path::new(str_path);
241
242 fs.insert_tree(
243 path!("/dir"),
244 json!({
245 ".zed": {
246 "settings.json": r#"{
247 "git_hosting_providers": [
248 {
249 "provider": "gitlab",
250 "base_url": "https://google.com",
251 "name": "foo"
252 }
253 ]
254 }"#
255 },
256 }),
257 )
258 .await;
259
260 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
261 let (_worktree, _) =
262 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
263 cx.executor().run_until_parked();
264
265 cx.update(|cx| {
266 let provider = GitHostingProviderRegistry::global(cx);
267 assert!(
268 provider
269 .list_hosting_providers()
270 .into_iter()
271 .any(|provider| provider.name() == "foo")
272 );
273 });
274
275 fs.atomic_write(
276 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
277 "{}".into(),
278 )
279 .await
280 .unwrap();
281
282 cx.run_until_parked();
283
284 cx.update(|cx| {
285 let provider = GitHostingProviderRegistry::global(cx);
286 assert!(
287 !provider
288 .list_hosting_providers()
289 .into_iter()
290 .any(|provider| provider.name() == "foo")
291 );
292 });
293}
294
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: a `.zed` directory at the worktree root and another nested in
    // `b/`, each with its own settings and tasks. Files `a/a.rs` and `b/b.rs`
    // should pick up settings from the nearest enclosing `.zed`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree only (no active item, no
    // other worktrees).
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identity of the task source living in the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Sanity-check settings precedence before querying tasks: each
            // file gets the tab size from its nearest `.zed/settings.json`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the nested `b/.zed` source is
    // listed first at this point.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as "recently scheduled" and add a global
    // tasks.json entry via the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task is now promoted to the front; the new
    // global task appears last, with its env var resolved.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
503
504#[gpui::test]
505async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
506 init_test(cx);
507 TaskStore::init(None);
508
509 let fs = FakeFs::new(cx.executor());
510 fs.insert_tree(
511 path!("/dir"),
512 json!({
513 ".zed": {
514 "tasks.json": r#"[{
515 "label": "test worktree root",
516 "command": "echo $ZED_WORKTREE_ROOT"
517 }]"#,
518 },
519 "a": {
520 "a.rs": "fn a() {\n A\n}"
521 },
522 }),
523 )
524 .await;
525
526 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
527 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
528
529 cx.executor().run_until_parked();
530 let worktree_id = cx.update(|cx| {
531 project.update(cx, |project, cx| {
532 project.worktrees(cx).next().unwrap().read(cx).id()
533 })
534 });
535
536 let active_non_worktree_item_tasks = cx
537 .update(|cx| {
538 get_all_tasks(
539 &project,
540 Arc::new(TaskContexts {
541 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
542 active_worktree_context: None,
543 other_worktree_contexts: Vec::new(),
544 lsp_task_sources: HashMap::default(),
545 latest_selection: None,
546 }),
547 cx,
548 )
549 })
550 .await;
551 assert!(
552 active_non_worktree_item_tasks.is_empty(),
553 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
554 );
555
556 let active_worktree_tasks = cx
557 .update(|cx| {
558 get_all_tasks(
559 &project,
560 Arc::new(TaskContexts {
561 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
562 active_worktree_context: Some((worktree_id, {
563 let mut worktree_context = TaskContext::default();
564 worktree_context
565 .task_variables
566 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
567 worktree_context
568 })),
569 other_worktree_contexts: Vec::new(),
570 lsp_task_sources: HashMap::default(),
571 latest_selection: None,
572 }),
573 cx,
574 )
575 })
576 .await;
577 assert_eq!(
578 active_worktree_tasks
579 .into_iter()
580 .map(|(source_kind, task)| {
581 let resolved = task.resolved;
582 (source_kind, resolved.command.unwrap())
583 })
584 .collect::<Vec<_>>(),
585 vec![(
586 TaskSourceKind::Worktree {
587 id: worktree_id,
588 directory_in_worktree: PathBuf::from(path!(".zed")),
589 id_base: if cfg!(windows) {
590 "local worktree tasks from directory \".zed\"".into()
591 } else {
592 "local worktree tasks from directory \".zed\"".into()
593 },
594 },
595 "echo /dir".to_string(),
596 )]
597 );
598}
599
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider that roots Python subprojects at the nearest
    // ancestor directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<Path>> {
            // Walk up at most `depth` ancestors looking for the manifest.
            for path in path.ancestors().take(depth) {
                let p = path.join("pyproject.toml");
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two Python subprojects, each with its own
    // pyproject.toml and virtual environment directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a should start a single "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.
    let (available_toolchains_for_b, root_path) = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's pyproject.toml directory.
    assert_eq!(root_path.as_ref(), Path::new("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain was explicitly activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Activating a toolchain for project-b should split it off onto its own
    // server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
796
797#[gpui::test]
798async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
799 init_test(cx);
800
801 let fs = FakeFs::new(cx.executor());
802 fs.insert_tree(
803 path!("/dir"),
804 json!({
805 "test.rs": "const A: i32 = 1;",
806 "test2.rs": "",
807 "Cargo.toml": "a = 1",
808 "package.json": "{\"a\": 1}",
809 }),
810 )
811 .await;
812
813 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
814 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
815
816 let mut fake_rust_servers = language_registry.register_fake_lsp(
817 "Rust",
818 FakeLspAdapter {
819 name: "the-rust-language-server",
820 capabilities: lsp::ServerCapabilities {
821 completion_provider: Some(lsp::CompletionOptions {
822 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
823 ..Default::default()
824 }),
825 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
826 lsp::TextDocumentSyncOptions {
827 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
828 ..Default::default()
829 },
830 )),
831 ..Default::default()
832 },
833 ..Default::default()
834 },
835 );
836 let mut fake_json_servers = language_registry.register_fake_lsp(
837 "JSON",
838 FakeLspAdapter {
839 name: "the-json-language-server",
840 capabilities: lsp::ServerCapabilities {
841 completion_provider: Some(lsp::CompletionOptions {
842 trigger_characters: Some(vec![":".to_string()]),
843 ..Default::default()
844 }),
845 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
846 lsp::TextDocumentSyncOptions {
847 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
848 ..Default::default()
849 },
850 )),
851 ..Default::default()
852 },
853 ..Default::default()
854 },
855 );
856
857 // Open a buffer without an associated language server.
858 let (toml_buffer, _handle) = project
859 .update(cx, |project, cx| {
860 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
861 })
862 .await
863 .unwrap();
864
865 // Open a buffer with an associated language server before the language for it has been loaded.
866 let (rust_buffer, _handle2) = project
867 .update(cx, |project, cx| {
868 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
869 })
870 .await
871 .unwrap();
872 rust_buffer.update(cx, |buffer, _| {
873 assert_eq!(buffer.language().map(|l| l.name()), None);
874 });
875
876 // Now we add the languages to the project, and ensure they get assigned to all
877 // the relevant open buffers.
878 language_registry.add(json_lang());
879 language_registry.add(rust_lang());
880 cx.executor().run_until_parked();
881 rust_buffer.update(cx, |buffer, _| {
882 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
883 });
884
885 // A server is started up, and it is notified about Rust files.
886 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
887 assert_eq!(
888 fake_rust_server
889 .receive_notification::<lsp::notification::DidOpenTextDocument>()
890 .await
891 .text_document,
892 lsp::TextDocumentItem {
893 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
894 version: 0,
895 text: "const A: i32 = 1;".to_string(),
896 language_id: "rust".to_string(),
897 }
898 );
899
900 // The buffer is configured based on the language server's capabilities.
901 rust_buffer.update(cx, |buffer, _| {
902 assert_eq!(
903 buffer
904 .completion_triggers()
905 .iter()
906 .cloned()
907 .collect::<Vec<_>>(),
908 &[".".to_string(), "::".to_string()]
909 );
910 });
911 toml_buffer.update(cx, |buffer, _| {
912 assert!(buffer.completion_triggers().is_empty());
913 });
914
915 // Edit a buffer. The changes are reported to the language server.
916 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
917 assert_eq!(
918 fake_rust_server
919 .receive_notification::<lsp::notification::DidChangeTextDocument>()
920 .await
921 .text_document,
922 lsp::VersionedTextDocumentIdentifier::new(
923 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
924 1
925 )
926 );
927
928 // Open a third buffer with a different associated language server.
929 let (json_buffer, _json_handle) = project
930 .update(cx, |project, cx| {
931 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
932 })
933 .await
934 .unwrap();
935
936 // A json language server is started up and is only notified about the json buffer.
937 let mut fake_json_server = fake_json_servers.next().await.unwrap();
938 assert_eq!(
939 fake_json_server
940 .receive_notification::<lsp::notification::DidOpenTextDocument>()
941 .await
942 .text_document,
943 lsp::TextDocumentItem {
944 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
945 version: 0,
946 text: "{\"a\": 1}".to_string(),
947 language_id: "json".to_string(),
948 }
949 );
950
951 // This buffer is configured based on the second language server's
952 // capabilities.
953 json_buffer.update(cx, |buffer, _| {
954 assert_eq!(
955 buffer
956 .completion_triggers()
957 .iter()
958 .cloned()
959 .collect::<Vec<_>>(),
960 &[":".to_string()]
961 );
962 });
963
964 // When opening another buffer whose language server is already running,
965 // it is also configured based on the existing language server's capabilities.
966 let (rust_buffer2, _handle4) = project
967 .update(cx, |project, cx| {
968 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
969 })
970 .await
971 .unwrap();
972 rust_buffer2.update(cx, |buffer, _| {
973 assert_eq!(
974 buffer
975 .completion_triggers()
976 .iter()
977 .cloned()
978 .collect::<Vec<_>>(),
979 &[".".to_string(), "::".to_string()]
980 );
981 });
982
983 // Changes are reported only to servers matching the buffer's language.
984 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
985 rust_buffer2.update(cx, |buffer, cx| {
986 buffer.edit([(0..0, "let x = 1;")], None, cx)
987 });
988 assert_eq!(
989 fake_rust_server
990 .receive_notification::<lsp::notification::DidChangeTextDocument>()
991 .await
992 .text_document,
993 lsp::VersionedTextDocumentIdentifier::new(
994 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
995 1
996 )
997 );
998
999 // Save notifications are reported to all servers.
1000 project
1001 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1002 .await
1003 .unwrap();
1004 assert_eq!(
1005 fake_rust_server
1006 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1007 .await
1008 .text_document,
1009 lsp::TextDocumentIdentifier::new(
1010 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1011 )
1012 );
1013 assert_eq!(
1014 fake_json_server
1015 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1016 .await
1017 .text_document,
1018 lsp::TextDocumentIdentifier::new(
1019 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1020 )
1021 );
1022
1023 // Renames are reported only to servers matching the buffer's language.
1024 fs.rename(
1025 Path::new(path!("/dir/test2.rs")),
1026 Path::new(path!("/dir/test3.rs")),
1027 Default::default(),
1028 )
1029 .await
1030 .unwrap();
1031 assert_eq!(
1032 fake_rust_server
1033 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1034 .await
1035 .text_document,
1036 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1037 );
1038 assert_eq!(
1039 fake_rust_server
1040 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1041 .await
1042 .text_document,
1043 lsp::TextDocumentItem {
1044 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1045 version: 0,
1046 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1047 language_id: "rust".to_string(),
1048 },
1049 );
1050
1051 rust_buffer2.update(cx, |buffer, cx| {
1052 buffer.update_diagnostics(
1053 LanguageServerId(0),
1054 DiagnosticSet::from_sorted_entries(
1055 vec![DiagnosticEntry {
1056 diagnostic: Default::default(),
1057 range: Anchor::MIN..Anchor::MAX,
1058 }],
1059 &buffer.snapshot(),
1060 ),
1061 cx,
1062 );
1063 assert_eq!(
1064 buffer
1065 .snapshot()
1066 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1067 .count(),
1068 1
1069 );
1070 });
1071
1072 // When the rename changes the extension of the file, the buffer gets closed on the old
1073 // language server and gets opened on the new one.
1074 fs.rename(
1075 Path::new(path!("/dir/test3.rs")),
1076 Path::new(path!("/dir/test3.json")),
1077 Default::default(),
1078 )
1079 .await
1080 .unwrap();
1081 assert_eq!(
1082 fake_rust_server
1083 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1084 .await
1085 .text_document,
1086 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1087 );
1088 assert_eq!(
1089 fake_json_server
1090 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1091 .await
1092 .text_document,
1093 lsp::TextDocumentItem {
1094 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1095 version: 0,
1096 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1097 language_id: "json".to_string(),
1098 },
1099 );
1100
1101 // We clear the diagnostics, since the language has changed.
1102 rust_buffer2.update(cx, |buffer, _| {
1103 assert_eq!(
1104 buffer
1105 .snapshot()
1106 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1107 .count(),
1108 0
1109 );
1110 });
1111
1112 // The renamed file's version resets after changing language server.
1113 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1114 assert_eq!(
1115 fake_json_server
1116 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1117 .await
1118 .text_document,
1119 lsp::VersionedTextDocumentIdentifier::new(
1120 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1121 1
1122 )
1123 );
1124
1125 // Restart language servers
1126 project.update(cx, |project, cx| {
1127 project.restart_language_servers_for_buffers(
1128 vec![rust_buffer.clone(), json_buffer.clone()],
1129 HashSet::default(),
1130 cx,
1131 );
1132 });
1133
1134 let mut rust_shutdown_requests = fake_rust_server
1135 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1136 let mut json_shutdown_requests = fake_json_server
1137 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1138 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1139
1140 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1141 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1142
1143 // Ensure rust document is reopened in new rust language server
1144 assert_eq!(
1145 fake_rust_server
1146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1147 .await
1148 .text_document,
1149 lsp::TextDocumentItem {
1150 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1151 version: 0,
1152 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1153 language_id: "rust".to_string(),
1154 }
1155 );
1156
1157 // Ensure json documents are reopened in new json language server
1158 assert_set_eq!(
1159 [
1160 fake_json_server
1161 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1162 .await
1163 .text_document,
1164 fake_json_server
1165 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1166 .await
1167 .text_document,
1168 ],
1169 [
1170 lsp::TextDocumentItem {
1171 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1172 version: 0,
1173 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1174 language_id: "json".to_string(),
1175 },
1176 lsp::TextDocumentItem {
1177 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1178 version: 0,
1179 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1180 language_id: "json".to_string(),
1181 }
1182 ]
1183 );
1184
1185 // Close notifications are reported only to servers matching the buffer's language.
1186 cx.update(|_| drop(_json_handle));
1187 let close_message = lsp::DidCloseTextDocumentParams {
1188 text_document: lsp::TextDocumentIdentifier::new(
1189 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1190 ),
1191 };
1192 assert_eq!(
1193 fake_json_server
1194 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1195 .await,
1196 close_message,
1197 );
1198}
1199
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that file-system events are forwarded to language servers
    // according to the watch globs they register via
    // `workspace/didChangeWatchedFiles`, including globs that point at
    // gitignored directories and at paths outside the worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Main worktree: `target` is gitignored, so it starts out unloaded.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Directories outside the worktree: a fake package registry and a fake
    // standard library, used to exercise watching non-worktree paths.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("Cargo.lock"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` itself is visible but its contents are not loaded yet.
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the read_dir counter so we can measure how many directory
    // scans the watch registration below causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register five watchers: a literal path, a brace glob inside the
    // worktree, a recursive glob under the ignored `target` dir, a recursive
    // glob outside the worktree, and a relative `**` glob.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    // Record incoming events sorted by URI so assertions below are
    // independent of delivery order.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone must not produce any events, and should
    // have scanned a bounded number of directories (5 observed here —
    // presumably target/y plus stdlib subdirectories; confirm against
    // worktree scanning logic if this changes).
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);

    // Ignore watches on the user's config directory that are unrelated to
    // this test's registrations.
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| !path.starts_with(config_dir()));
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("Cargo.lock"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                // Only `target/y` is fully loaded, because only its glob was watched.
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    // Wrong extension: matches no watcher.
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    // Under target/x: not covered by the target/y glob.
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this path says `/the-stdlib/LICENSE` while the tree above
    // was created at `/the/stdlib` — presumably intended as a non-matching
    // mutation either way; confirm whether `/the/stdlib/LICENSE` was meant.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1500
1501#[gpui::test]
1502async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1503 init_test(cx);
1504
1505 let fs = FakeFs::new(cx.executor());
1506 fs.insert_tree(
1507 path!("/dir"),
1508 json!({
1509 "a.rs": "let a = 1;",
1510 "b.rs": "let b = 2;"
1511 }),
1512 )
1513 .await;
1514
1515 let project = Project::test(
1516 fs,
1517 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1518 cx,
1519 )
1520 .await;
1521 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1522
1523 let buffer_a = project
1524 .update(cx, |project, cx| {
1525 project.open_local_buffer(path!("/dir/a.rs"), cx)
1526 })
1527 .await
1528 .unwrap();
1529 let buffer_b = project
1530 .update(cx, |project, cx| {
1531 project.open_local_buffer(path!("/dir/b.rs"), cx)
1532 })
1533 .await
1534 .unwrap();
1535
1536 lsp_store.update(cx, |lsp_store, cx| {
1537 lsp_store
1538 .update_diagnostics(
1539 LanguageServerId(0),
1540 lsp::PublishDiagnosticsParams {
1541 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1542 version: None,
1543 diagnostics: vec![lsp::Diagnostic {
1544 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1545 severity: Some(lsp::DiagnosticSeverity::ERROR),
1546 message: "error 1".to_string(),
1547 ..Default::default()
1548 }],
1549 },
1550 None,
1551 DiagnosticSourceKind::Pushed,
1552 &[],
1553 cx,
1554 )
1555 .unwrap();
1556 lsp_store
1557 .update_diagnostics(
1558 LanguageServerId(0),
1559 lsp::PublishDiagnosticsParams {
1560 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1561 version: None,
1562 diagnostics: vec![lsp::Diagnostic {
1563 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1564 severity: Some(DiagnosticSeverity::WARNING),
1565 message: "error 2".to_string(),
1566 ..Default::default()
1567 }],
1568 },
1569 None,
1570 DiagnosticSourceKind::Pushed,
1571 &[],
1572 cx,
1573 )
1574 .unwrap();
1575 });
1576
1577 buffer_a.update(cx, |buffer, _| {
1578 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1579 assert_eq!(
1580 chunks
1581 .iter()
1582 .map(|(s, d)| (s.as_str(), *d))
1583 .collect::<Vec<_>>(),
1584 &[
1585 ("let ", None),
1586 ("a", Some(DiagnosticSeverity::ERROR)),
1587 (" = 1;", None),
1588 ]
1589 );
1590 });
1591 buffer_b.update(cx, |buffer, _| {
1592 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1593 assert_eq!(
1594 chunks
1595 .iter()
1596 .map(|(s, d)| (s.as_str(), *d))
1597 .collect::<Vec<_>>(),
1598 &[
1599 ("let ", None),
1600 ("b", Some(DiagnosticSeverity::WARNING)),
1601 (" = 2;", None),
1602 ]
1603 );
1604 });
1605}
1606
1607#[gpui::test]
1608async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1609 init_test(cx);
1610
1611 let fs = FakeFs::new(cx.executor());
1612 fs.insert_tree(
1613 path!("/root"),
1614 json!({
1615 "dir": {
1616 ".git": {
1617 "HEAD": "ref: refs/heads/main",
1618 },
1619 ".gitignore": "b.rs",
1620 "a.rs": "let a = 1;",
1621 "b.rs": "let b = 2;",
1622 },
1623 "other.rs": "let b = c;"
1624 }),
1625 )
1626 .await;
1627
1628 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1629 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1630 let (worktree, _) = project
1631 .update(cx, |project, cx| {
1632 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1633 })
1634 .await
1635 .unwrap();
1636 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1637
1638 let (worktree, _) = project
1639 .update(cx, |project, cx| {
1640 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1641 })
1642 .await
1643 .unwrap();
1644 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1645
1646 let server_id = LanguageServerId(0);
1647 lsp_store.update(cx, |lsp_store, cx| {
1648 lsp_store
1649 .update_diagnostics(
1650 server_id,
1651 lsp::PublishDiagnosticsParams {
1652 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1653 version: None,
1654 diagnostics: vec![lsp::Diagnostic {
1655 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1656 severity: Some(lsp::DiagnosticSeverity::ERROR),
1657 message: "unused variable 'b'".to_string(),
1658 ..Default::default()
1659 }],
1660 },
1661 None,
1662 DiagnosticSourceKind::Pushed,
1663 &[],
1664 cx,
1665 )
1666 .unwrap();
1667 lsp_store
1668 .update_diagnostics(
1669 server_id,
1670 lsp::PublishDiagnosticsParams {
1671 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1672 version: None,
1673 diagnostics: vec![lsp::Diagnostic {
1674 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1675 severity: Some(lsp::DiagnosticSeverity::ERROR),
1676 message: "unknown variable 'c'".to_string(),
1677 ..Default::default()
1678 }],
1679 },
1680 None,
1681 DiagnosticSourceKind::Pushed,
1682 &[],
1683 cx,
1684 )
1685 .unwrap();
1686 });
1687
1688 let main_ignored_buffer = project
1689 .update(cx, |project, cx| {
1690 project.open_buffer((main_worktree_id, "b.rs"), cx)
1691 })
1692 .await
1693 .unwrap();
1694 main_ignored_buffer.update(cx, |buffer, _| {
1695 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1696 assert_eq!(
1697 chunks
1698 .iter()
1699 .map(|(s, d)| (s.as_str(), *d))
1700 .collect::<Vec<_>>(),
1701 &[
1702 ("let ", None),
1703 ("b", Some(DiagnosticSeverity::ERROR)),
1704 (" = 2;", None),
1705 ],
1706 "Gigitnored buffers should still get in-buffer diagnostics",
1707 );
1708 });
1709 let other_buffer = project
1710 .update(cx, |project, cx| {
1711 project.open_buffer((other_worktree_id, ""), cx)
1712 })
1713 .await
1714 .unwrap();
1715 other_buffer.update(cx, |buffer, _| {
1716 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1717 assert_eq!(
1718 chunks
1719 .iter()
1720 .map(|(s, d)| (s.as_str(), *d))
1721 .collect::<Vec<_>>(),
1722 &[
1723 ("let b = ", None),
1724 ("c", Some(DiagnosticSeverity::ERROR)),
1725 (";", None),
1726 ],
1727 "Buffers from hidden projects should still get in-buffer diagnostics"
1728 );
1729 });
1730
1731 project.update(cx, |project, cx| {
1732 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1733 assert_eq!(
1734 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1735 vec![(
1736 ProjectPath {
1737 worktree_id: main_worktree_id,
1738 path: Arc::from(Path::new("b.rs")),
1739 },
1740 server_id,
1741 DiagnosticSummary {
1742 error_count: 1,
1743 warning_count: 0,
1744 }
1745 )]
1746 );
1747 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1748 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1749 });
1750}
1751
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project-level event sequence around a disk-based
    // diagnostics cycle: server added → progress started → diagnostics
    // updated → progress finished — and that a redundant empty publish
    // produces no extra event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications under this token are treated as a
            // disk-based diagnostics cycle by the project.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events *before* driving the server so every
    // event below is observed in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a single error for a.rs (a file that is not even open yet).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards should surface the previously published
    // diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Second identical (empty) publish: no event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1888
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics cycle is
    // still in flight must not wedge the project: once the replacement
    // server's cycle finishes, no server is reported as running diagnostics,
    // even though the old server never completed its cycle.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Expected event order: old server removed (id 0), new server added
    // (id 1), inlay hints refreshed, buffer registered, diagnostics started.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1989
1990#[gpui::test]
1991async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1992 init_test(cx);
1993
1994 let fs = FakeFs::new(cx.executor());
1995 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1996
1997 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1998
1999 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2000 language_registry.add(rust_lang());
2001 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2002
2003 let (buffer, _) = project
2004 .update(cx, |project, cx| {
2005 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2006 })
2007 .await
2008 .unwrap();
2009
2010 // Publish diagnostics
2011 let fake_server = fake_servers.next().await.unwrap();
2012 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2013 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2014 version: None,
2015 diagnostics: vec![lsp::Diagnostic {
2016 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2017 severity: Some(lsp::DiagnosticSeverity::ERROR),
2018 message: "the message".to_string(),
2019 ..Default::default()
2020 }],
2021 });
2022
2023 cx.executor().run_until_parked();
2024 buffer.update(cx, |buffer, _| {
2025 assert_eq!(
2026 buffer
2027 .snapshot()
2028 .diagnostics_in_range::<_, usize>(0..1, false)
2029 .map(|entry| entry.diagnostic.message)
2030 .collect::<Vec<_>>(),
2031 ["the message".to_string()]
2032 );
2033 });
2034 project.update(cx, |project, cx| {
2035 assert_eq!(
2036 project.diagnostic_summary(false, cx),
2037 DiagnosticSummary {
2038 error_count: 1,
2039 warning_count: 0,
2040 }
2041 );
2042 });
2043
2044 project.update(cx, |project, cx| {
2045 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2046 });
2047
2048 // The diagnostics are cleared.
2049 cx.executor().run_until_parked();
2050 buffer.update(cx, |buffer, _| {
2051 assert_eq!(
2052 buffer
2053 .snapshot()
2054 .diagnostics_in_range::<_, usize>(0..1, false)
2055 .map(|entry| entry.diagnostic.message)
2056 .collect::<Vec<_>>(),
2057 Vec::<String>::new(),
2058 );
2059 });
2060 project.update(cx, |project, cx| {
2061 assert_eq!(
2062 project.diagnostic_summary(false, cx),
2063 DiagnosticSummary {
2064 error_count: 0,
2065 warning_count: 0,
2066 }
2067 );
2068 });
2069}
2070
2071#[gpui::test]
2072async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2073 init_test(cx);
2074
2075 let fs = FakeFs::new(cx.executor());
2076 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2077
2078 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2079 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2080
2081 language_registry.add(rust_lang());
2082 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2083
2084 let (buffer, _handle) = project
2085 .update(cx, |project, cx| {
2086 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2087 })
2088 .await
2089 .unwrap();
2090
2091 // Before restarting the server, report diagnostics with an unknown buffer version.
2092 let fake_server = fake_servers.next().await.unwrap();
2093 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2094 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2095 version: Some(10000),
2096 diagnostics: Vec::new(),
2097 });
2098 cx.executor().run_until_parked();
2099 project.update(cx, |project, cx| {
2100 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2101 });
2102
2103 let mut fake_server = fake_servers.next().await.unwrap();
2104 let notification = fake_server
2105 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2106 .await
2107 .text_document;
2108 assert_eq!(notification.version, 0);
2109}
2110
2111#[gpui::test]
2112async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2113 init_test(cx);
2114
2115 let progress_token = "the-progress-token";
2116
2117 let fs = FakeFs::new(cx.executor());
2118 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2119
2120 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2121
2122 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2123 language_registry.add(rust_lang());
2124 let mut fake_servers = language_registry.register_fake_lsp(
2125 "Rust",
2126 FakeLspAdapter {
2127 name: "the-language-server",
2128 disk_based_diagnostics_sources: vec!["disk".into()],
2129 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2130 ..Default::default()
2131 },
2132 );
2133
2134 let (buffer, _handle) = project
2135 .update(cx, |project, cx| {
2136 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2137 })
2138 .await
2139 .unwrap();
2140
2141 // Simulate diagnostics starting to update.
2142 let mut fake_server = fake_servers.next().await.unwrap();
2143 fake_server
2144 .start_progress_with(
2145 "another-token",
2146 lsp::WorkDoneProgressBegin {
2147 cancellable: Some(false),
2148 ..Default::default()
2149 },
2150 )
2151 .await;
2152 fake_server
2153 .start_progress_with(
2154 progress_token,
2155 lsp::WorkDoneProgressBegin {
2156 cancellable: Some(true),
2157 ..Default::default()
2158 },
2159 )
2160 .await;
2161 cx.executor().run_until_parked();
2162
2163 project.update(cx, |project, cx| {
2164 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2165 });
2166
2167 let cancel_notification = fake_server
2168 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2169 .await;
2170 assert_eq!(
2171 cancel_notification.token,
2172 NumberOrString::String(progress_token.into())
2173 );
2174}
2175
2176#[gpui::test]
2177async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
2178 init_test(cx);
2179
2180 let fs = FakeFs::new(cx.executor());
2181 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
2182 .await;
2183
2184 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2185 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2186
2187 let mut fake_rust_servers = language_registry.register_fake_lsp(
2188 "Rust",
2189 FakeLspAdapter {
2190 name: "rust-lsp",
2191 ..Default::default()
2192 },
2193 );
2194 let mut fake_js_servers = language_registry.register_fake_lsp(
2195 "JavaScript",
2196 FakeLspAdapter {
2197 name: "js-lsp",
2198 ..Default::default()
2199 },
2200 );
2201 language_registry.add(rust_lang());
2202 language_registry.add(js_lang());
2203
2204 let _rs_buffer = project
2205 .update(cx, |project, cx| {
2206 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2207 })
2208 .await
2209 .unwrap();
2210 let _js_buffer = project
2211 .update(cx, |project, cx| {
2212 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
2213 })
2214 .await
2215 .unwrap();
2216
2217 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
2218 assert_eq!(
2219 fake_rust_server_1
2220 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2221 .await
2222 .text_document
2223 .uri
2224 .as_str(),
2225 uri!("file:///dir/a.rs")
2226 );
2227
2228 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2229 assert_eq!(
2230 fake_js_server
2231 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2232 .await
2233 .text_document
2234 .uri
2235 .as_str(),
2236 uri!("file:///dir/b.js")
2237 );
2238
2239 // Disable Rust language server, ensuring only that server gets stopped.
2240 cx.update(|cx| {
2241 SettingsStore::update_global(cx, |settings, cx| {
2242 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2243 settings.languages.0.insert(
2244 "Rust".into(),
2245 LanguageSettingsContent {
2246 enable_language_server: Some(false),
2247 ..Default::default()
2248 },
2249 );
2250 });
2251 })
2252 });
2253 fake_rust_server_1
2254 .receive_notification::<lsp::notification::Exit>()
2255 .await;
2256
2257 // Enable Rust and disable JavaScript language servers, ensuring that the
2258 // former gets started again and that the latter stops.
2259 cx.update(|cx| {
2260 SettingsStore::update_global(cx, |settings, cx| {
2261 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2262 settings.languages.0.insert(
2263 LanguageName::new("Rust"),
2264 LanguageSettingsContent {
2265 enable_language_server: Some(true),
2266 ..Default::default()
2267 },
2268 );
2269 settings.languages.0.insert(
2270 LanguageName::new("JavaScript"),
2271 LanguageSettingsContent {
2272 enable_language_server: Some(false),
2273 ..Default::default()
2274 },
2275 );
2276 });
2277 })
2278 });
2279 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2280 assert_eq!(
2281 fake_rust_server_2
2282 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2283 .await
2284 .text_document
2285 .uri
2286 .as_str(),
2287 uri!("file:///dir/a.rs")
2288 );
2289 fake_js_server
2290 .receive_notification::<lsp::notification::Exit>()
2291 .await;
2292}
2293
// Verifies that diagnostics published against an older buffer version are
// transformed ("translated") through subsequent edits: ranges shift with
// inserted text, overlapping diagnostics are ordered and highlighted
// correctly, and out-of-order publishes against a later version still land
// on the right, edit-adjusted positions.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source so the
    // published diagnostics below are marked `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer,
    // i.e. *before* the two blank lines were inserted above.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Each row index is now 2 greater than in the original publish.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Whole-buffer chunk walk: diagnostic-highlighted runs alternate with
        // plain text at the shifted positions.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the highlighted runs at the range boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                // Wider WARNING range fully containing the ERROR range above.
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The containing (wider) diagnostic is returned first.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // In the overlap, the more severe ERROR wins the highlight; the
        // remainder of the wider range shows as WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both entries reflect the edits above ('A' pushed right by the
        // indentation + signature change, 'BB' widened by the "xxx" insert).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2585
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, and at end-of-line it is
// extended backward to cover the preceding character instead.
//
// NOTE(review): this test uses bare "/dir" paths while neighboring tests use
// the path!() macro for cross-platform paths — confirm whether this is
// intentional.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store:
    // one mid-line (before ';') and one at the very end of line 2.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2661
// Verifies that diagnostics reported by two distinct language servers for the
// same path are kept separately (keyed by LanguageServerId) and both counted
// in the project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and server 1 reports a different error over that same range;
        // the second must not overwrite the first.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2722
// Verifies that edits_from_lsp correctly rebases LSP edits that were computed
// against an *older* buffer version: the user edits the buffer after the
// server snapshotted it, and the LSP edits (expressed in old coordinates,
// passed with the old document version) must still apply cleanly on top of
// the user's changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; the LSP edits below
    // will be expressed against this (soon stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The positions below are in *old*-version coordinates; passing
    // `Some(lsp_document_version)` tells edits_from_lsp to rebase them.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves both the server's intent and the
    // user's interleaved comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2877
// Verifies that edits_from_lsp minimizes a "very large diff" style edit set:
// when the server rewrites most of the file to express a small change (as
// rust-analyzer does for merge-imports), the resulting buffer edits collapse
// to just the genuinely-changed spans.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits collapse into just two minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2988
// Verifies that edits_from_lsp tolerates a spec-violating edit pair — an
// insertion listed *after* a replacement at the same start position — and
// still produces the intended result (the insertion lands before the
// replaced text).
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3044
// Verifies that edits_from_lsp sanitizes malformed server edits: unordered
// edits, an inverted range (end before start), and a range whose end line is
// past the end of the file all get normalized/clamped, yielding the same
// minimal edit set as the well-formed equivalent.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the same two minimal edits come out.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3151
3152fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3153 buffer: &Buffer,
3154 range: Range<T>,
3155) -> Vec<(String, Option<DiagnosticSeverity>)> {
3156 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3157 for chunk in buffer.snapshot().chunks(range, true) {
3158 if chunks
3159 .last()
3160 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3161 {
3162 chunks.last_mut().unwrap().0.push_str(chunk.text);
3163 } else {
3164 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3165 }
3166 }
3167 chunks
3168}
3169
// Verifies go-to-definition into a file outside the project's worktree:
// the target file is loaded into a new *invisible* worktree, no additional
// language server is started for it, and dropping the definition releases
// the temporary worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers GotoDefinition with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree (visible == false).
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list (abs_path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3268
// Verifies that when a completion item carries a `text_edit`, that edit's
// range and new text take precedence over both `insert_text` and `label`
// when resolving the completion's replacement range and text.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item offers all three sources; `text_edit` replaces the last 3
    // characters ("fqn") with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3351
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how completion items are resolved when the server supplies a
    // list-level default `edit_range` (LSP `CompletionList.itemDefaults`):
    //  - if an item lacks `text_edit` but has `insert_text`, the default range
    //    is paired with `insert_text`;
    //  - if an item lacks both, the default range is paired with the `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the fake server's handler is
        // installed afterwards and `.next().await` waits for it to be invoked.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn" (last 3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text wins over the label; range comes from the defaults.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither insert_text nor text_edit, the label is the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3487
3488#[gpui::test]
3489async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
3490 init_test(cx);
3491
3492 let fs = FakeFs::new(cx.executor());
3493 fs.insert_tree(
3494 path!("/dir"),
3495 json!({
3496 "a.ts": "",
3497 }),
3498 )
3499 .await;
3500
3501 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3502
3503 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3504 language_registry.add(typescript_lang());
3505 let mut fake_language_servers = language_registry.register_fake_lsp(
3506 "TypeScript",
3507 FakeLspAdapter {
3508 capabilities: lsp::ServerCapabilities {
3509 completion_provider: Some(lsp::CompletionOptions {
3510 trigger_characters: Some(vec![":".to_string()]),
3511 ..Default::default()
3512 }),
3513 ..Default::default()
3514 },
3515 ..Default::default()
3516 },
3517 );
3518
3519 let (buffer, _handle) = project
3520 .update(cx, |p, cx| {
3521 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3522 })
3523 .await
3524 .unwrap();
3525
3526 let fake_server = fake_language_servers.next().await.unwrap();
3527
3528 // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
3529 let text = "let a = b.fqn";
3530 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3531 let completions = project.update(cx, |project, cx| {
3532 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3533 });
3534
3535 fake_server
3536 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3537 Ok(Some(lsp::CompletionResponse::Array(vec![
3538 lsp::CompletionItem {
3539 label: "fullyQualifiedName?".into(),
3540 insert_text: Some("fullyQualifiedName".into()),
3541 ..Default::default()
3542 },
3543 ])))
3544 })
3545 .next()
3546 .await;
3547 let completions = completions
3548 .await
3549 .unwrap()
3550 .into_iter()
3551 .flat_map(|response| response.completions)
3552 .collect::<Vec<_>>();
3553 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3554 assert_eq!(completions.len(), 1);
3555 assert_eq!(completions[0].new_text, "fullyQualifiedName");
3556 assert_eq!(
3557 completions[0].replace_range.to_offset(&snapshot),
3558 text.len() - 3..text.len()
3559 );
3560
3561 // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
3562 let text = "let a = \"atoms/cmp\"";
3563 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3564 let completions = project.update(cx, |project, cx| {
3565 project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
3566 });
3567
3568 fake_server
3569 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3570 Ok(Some(lsp::CompletionResponse::Array(vec![
3571 lsp::CompletionItem {
3572 label: "component".into(),
3573 ..Default::default()
3574 },
3575 ])))
3576 })
3577 .next()
3578 .await;
3579 let completions = completions
3580 .await
3581 .unwrap()
3582 .into_iter()
3583 .flat_map(|response| response.completions)
3584 .collect::<Vec<_>>();
3585 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3586 assert_eq!(completions.len(), 1);
3587 assert_eq!(completions[0].new_text, "component");
3588 assert_eq!(
3589 completions[0].replace_range.to_offset(&snapshot),
3590 text.len() - 4..text.len() - 1
3591 );
3592}
3593
3594#[gpui::test]
3595async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3596 init_test(cx);
3597
3598 let fs = FakeFs::new(cx.executor());
3599 fs.insert_tree(
3600 path!("/dir"),
3601 json!({
3602 "a.ts": "",
3603 }),
3604 )
3605 .await;
3606
3607 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3608
3609 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3610 language_registry.add(typescript_lang());
3611 let mut fake_language_servers = language_registry.register_fake_lsp(
3612 "TypeScript",
3613 FakeLspAdapter {
3614 capabilities: lsp::ServerCapabilities {
3615 completion_provider: Some(lsp::CompletionOptions {
3616 trigger_characters: Some(vec![":".to_string()]),
3617 ..Default::default()
3618 }),
3619 ..Default::default()
3620 },
3621 ..Default::default()
3622 },
3623 );
3624
3625 let (buffer, _handle) = project
3626 .update(cx, |p, cx| {
3627 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3628 })
3629 .await
3630 .unwrap();
3631
3632 let fake_server = fake_language_servers.next().await.unwrap();
3633
3634 let text = "let a = b.fqn";
3635 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3636 let completions = project.update(cx, |project, cx| {
3637 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3638 });
3639
3640 fake_server
3641 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3642 Ok(Some(lsp::CompletionResponse::Array(vec![
3643 lsp::CompletionItem {
3644 label: "fullyQualifiedName?".into(),
3645 insert_text: Some("fully\rQualified\r\nName".into()),
3646 ..Default::default()
3647 },
3648 ])))
3649 })
3650 .next()
3651 .await;
3652 let completions = completions
3653 .await
3654 .unwrap()
3655 .into_iter()
3656 .flat_map(|response| response.completions)
3657 .collect::<Vec<_>>();
3658 assert_eq!(completions.len(), 1);
3659 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3660}
3661
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for a code action that carries a *command* rather than
    // edits: codeAction request -> codeAction/resolve (which attaches the
    // command) -> workspace/executeCommand -> server-initiated
    // workspace/applyEdit. The resulting edits must land in the project
    // transaction returned by `apply_code_action`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // resolve_provider lets the server defer the command
                        // to the codeAction/resolve round-trip below.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // Opaque server-side data; its presence triggers command
                    // attachment in the resolve handler below.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undoing the transaction's edits restores the original contents.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3803
3804#[gpui::test(iterations = 10)]
3805async fn test_save_file(cx: &mut gpui::TestAppContext) {
3806 init_test(cx);
3807
3808 let fs = FakeFs::new(cx.executor());
3809 fs.insert_tree(
3810 path!("/dir"),
3811 json!({
3812 "file1": "the old contents",
3813 }),
3814 )
3815 .await;
3816
3817 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3818 let buffer = project
3819 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3820 .await
3821 .unwrap();
3822 buffer.update(cx, |buffer, cx| {
3823 assert_eq!(buffer.text(), "the old contents");
3824 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3825 });
3826
3827 project
3828 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3829 .await
3830 .unwrap();
3831
3832 let new_text = fs
3833 .load(Path::new(path!("/dir/file1")))
3834 .await
3835 .unwrap()
3836 .replace("\r\n", "\n");
3837 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3838}
3839
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a name matching a registered language
    // ("file.rs" -> Rust) must start that language's server and open the
    // document in it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an in-memory buffer with no file; no language can be detected
    // yet, so registering it must not start any server.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as "file.rs" inside the project's only worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3919
3920#[gpui::test(iterations = 30)]
3921async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3922 init_test(cx);
3923
3924 let fs = FakeFs::new(cx.executor());
3925 fs.insert_tree(
3926 path!("/dir"),
3927 json!({
3928 "file1": "the original contents",
3929 }),
3930 )
3931 .await;
3932
3933 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3934 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3935 let buffer = project
3936 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3937 .await
3938 .unwrap();
3939
3940 // Simulate buffer diffs being slow, so that they don't complete before
3941 // the next file change occurs.
3942 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3943
3944 // Change the buffer's file on disk, and then wait for the file change
3945 // to be detected by the worktree, so that the buffer starts reloading.
3946 fs.save(
3947 path!("/dir/file1").as_ref(),
3948 &"the first contents".into(),
3949 Default::default(),
3950 )
3951 .await
3952 .unwrap();
3953 worktree.next_event(cx).await;
3954
3955 // Change the buffer's file again. Depending on the random seed, the
3956 // previous file change may still be in progress.
3957 fs.save(
3958 path!("/dir/file1").as_ref(),
3959 &"the second contents".into(),
3960 Default::default(),
3961 )
3962 .await
3963 .unwrap();
3964 worktree.next_event(cx).await;
3965
3966 cx.executor().run_until_parked();
3967 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3968 buffer.read_with(cx, |buffer, _| {
3969 assert_eq!(buffer.text(), on_disk_text);
3970 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3971 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3972 });
3973}
3974
3975#[gpui::test(iterations = 30)]
3976async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3977 init_test(cx);
3978
3979 let fs = FakeFs::new(cx.executor());
3980 fs.insert_tree(
3981 path!("/dir"),
3982 json!({
3983 "file1": "the original contents",
3984 }),
3985 )
3986 .await;
3987
3988 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3989 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3990 let buffer = project
3991 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3992 .await
3993 .unwrap();
3994
3995 // Simulate buffer diffs being slow, so that they don't complete before
3996 // the next file change occurs.
3997 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3998
3999 // Change the buffer's file on disk, and then wait for the file change
4000 // to be detected by the worktree, so that the buffer starts reloading.
4001 fs.save(
4002 path!("/dir/file1").as_ref(),
4003 &"the first contents".into(),
4004 Default::default(),
4005 )
4006 .await
4007 .unwrap();
4008 worktree.next_event(cx).await;
4009
4010 cx.executor()
4011 .spawn(cx.executor().simulate_random_delay())
4012 .await;
4013
4014 // Perform a noop edit, causing the buffer's version to increase.
4015 buffer.update(cx, |buffer, cx| {
4016 buffer.edit([(0..0, " ")], None, cx);
4017 buffer.undo(cx);
4018 });
4019
4020 cx.executor().run_until_parked();
4021 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4022 buffer.read_with(cx, |buffer, _| {
4023 let buffer_text = buffer.text();
4024 if buffer_text == on_disk_text {
4025 assert!(
4026 !buffer.is_dirty() && !buffer.has_conflict(),
4027 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4028 );
4029 }
4030 // If the file change occurred while the buffer was processing the first
4031 // change, the buffer will be in a conflicting state.
4032 else {
4033 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4034 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4035 }
4036 });
4037}
4038
4039#[gpui::test]
4040async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4041 init_test(cx);
4042
4043 let fs = FakeFs::new(cx.executor());
4044 fs.insert_tree(
4045 path!("/dir"),
4046 json!({
4047 "file1": "the old contents",
4048 }),
4049 )
4050 .await;
4051
4052 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4053 let buffer = project
4054 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4055 .await
4056 .unwrap();
4057 buffer.update(cx, |buffer, cx| {
4058 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4059 });
4060
4061 project
4062 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4063 .await
4064 .unwrap();
4065
4066 let new_text = fs
4067 .load(Path::new(path!("/dir/file1")))
4068 .await
4069 .unwrap()
4070 .replace("\r\n", "\n");
4071 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4072}
4073
4074#[gpui::test]
4075async fn test_save_as(cx: &mut gpui::TestAppContext) {
4076 init_test(cx);
4077
4078 let fs = FakeFs::new(cx.executor());
4079 fs.insert_tree("/dir", json!({})).await;
4080
4081 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4082
4083 let languages = project.update(cx, |project, _| project.languages().clone());
4084 languages.add(rust_lang());
4085
4086 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
4087 buffer.update(cx, |buffer, cx| {
4088 buffer.edit([(0..0, "abc")], None, cx);
4089 assert!(buffer.is_dirty());
4090 assert!(!buffer.has_conflict());
4091 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4092 });
4093 project
4094 .update(cx, |project, cx| {
4095 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4096 let path = ProjectPath {
4097 worktree_id,
4098 path: Arc::from(Path::new("file1.rs")),
4099 };
4100 project.save_buffer_as(buffer.clone(), path, cx)
4101 })
4102 .await
4103 .unwrap();
4104 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4105
4106 cx.executor().run_until_parked();
4107 buffer.update(cx, |buffer, cx| {
4108 assert_eq!(
4109 buffer.file().unwrap().full_path(cx),
4110 Path::new("dir/file1.rs")
4111 );
4112 assert!(!buffer.is_dirty());
4113 assert!(!buffer.has_conflict());
4114 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4115 });
4116
4117 let opened_buffer = project
4118 .update(cx, |project, cx| {
4119 project.open_local_buffer("/dir/file1.rs", cx)
4120 })
4121 .await
4122 .unwrap();
4123 assert_eq!(opened_buffer, buffer);
4124}
4125
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses a real filesystem (TempTree + RealFs) to rename/delete files and
    // directories, then checks that:
    //  - worktree entry ids are stable across renames,
    //  - open buffers track their files' new paths / deleted state,
    //  - a remote replica of the worktree converges after replaying the
    //    observed update stream.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS events require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a relative path to its worktree entry id, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames — including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // file5 was deleted; its buffer keeps the old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4291
4292#[gpui::test(iterations = 10)]
4293async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4294 init_test(cx);
4295
4296 let fs = FakeFs::new(cx.executor());
4297 fs.insert_tree(
4298 path!("/dir"),
4299 json!({
4300 "a": {
4301 "file1": "",
4302 }
4303 }),
4304 )
4305 .await;
4306
4307 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4308 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4309 let tree_id = tree.update(cx, |tree, _| tree.id());
4310
4311 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4312 project.update(cx, |project, cx| {
4313 let tree = project.worktrees(cx).next().unwrap();
4314 tree.read(cx)
4315 .entry_for_path(path)
4316 .unwrap_or_else(|| panic!("no entry for path {}", path))
4317 .id
4318 })
4319 };
4320
4321 let dir_id = id_for_path("a", cx);
4322 let file_id = id_for_path("a/file1", cx);
4323 let buffer = project
4324 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4325 .await
4326 .unwrap();
4327 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4328
4329 project
4330 .update(cx, |project, cx| {
4331 project.rename_entry(dir_id, Path::new("b"), cx)
4332 })
4333 .unwrap()
4334 .await
4335 .into_included()
4336 .unwrap();
4337 cx.executor().run_until_parked();
4338
4339 assert_eq!(id_for_path("b", cx), dir_id);
4340 assert_eq!(id_for_path("b/file1", cx), file_id);
4341 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4342}
4343
4344#[gpui::test]
4345async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4346 init_test(cx);
4347
4348 let fs = FakeFs::new(cx.executor());
4349 fs.insert_tree(
4350 "/dir",
4351 json!({
4352 "a.txt": "a-contents",
4353 "b.txt": "b-contents",
4354 }),
4355 )
4356 .await;
4357
4358 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4359
4360 // Spawn multiple tasks to open paths, repeating some paths.
4361 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4362 (
4363 p.open_local_buffer("/dir/a.txt", cx),
4364 p.open_local_buffer("/dir/b.txt", cx),
4365 p.open_local_buffer("/dir/a.txt", cx),
4366 )
4367 });
4368
4369 let buffer_a_1 = buffer_a_1.await.unwrap();
4370 let buffer_a_2 = buffer_a_2.await.unwrap();
4371 let buffer_b = buffer_b.await.unwrap();
4372 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4373 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4374
4375 // There is only one buffer per path.
4376 let buffer_a_id = buffer_a_1.entity_id();
4377 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4378
4379 // Open the same path again while it is still open.
4380 drop(buffer_a_1);
4381 let buffer_a_3 = project
4382 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4383 .await
4384 .unwrap();
4385
4386 // There's still only one buffer per path.
4387 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4388}
4389
4390#[gpui::test]
4391async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4392 init_test(cx);
4393
4394 let fs = FakeFs::new(cx.executor());
4395 fs.insert_tree(
4396 path!("/dir"),
4397 json!({
4398 "file1": "abc",
4399 "file2": "def",
4400 "file3": "ghi",
4401 }),
4402 )
4403 .await;
4404
4405 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4406
4407 let buffer1 = project
4408 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4409 .await
4410 .unwrap();
4411 let events = Arc::new(Mutex::new(Vec::new()));
4412
4413 // initially, the buffer isn't dirty.
4414 buffer1.update(cx, |buffer, cx| {
4415 cx.subscribe(&buffer1, {
4416 let events = events.clone();
4417 move |_, _, event, _| match event {
4418 BufferEvent::Operation { .. } => {}
4419 _ => events.lock().push(event.clone()),
4420 }
4421 })
4422 .detach();
4423
4424 assert!(!buffer.is_dirty());
4425 assert!(events.lock().is_empty());
4426
4427 buffer.edit([(1..2, "")], None, cx);
4428 });
4429
4430 // after the first edit, the buffer is dirty, and emits a dirtied event.
4431 buffer1.update(cx, |buffer, cx| {
4432 assert!(buffer.text() == "ac");
4433 assert!(buffer.is_dirty());
4434 assert_eq!(
4435 *events.lock(),
4436 &[
4437 language::BufferEvent::Edited,
4438 language::BufferEvent::DirtyChanged
4439 ]
4440 );
4441 events.lock().clear();
4442 buffer.did_save(
4443 buffer.version(),
4444 buffer.file().unwrap().disk_state().mtime(),
4445 cx,
4446 );
4447 });
4448
4449 // after saving, the buffer is not dirty, and emits a saved event.
4450 buffer1.update(cx, |buffer, cx| {
4451 assert!(!buffer.is_dirty());
4452 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4453 events.lock().clear();
4454
4455 buffer.edit([(1..1, "B")], None, cx);
4456 buffer.edit([(2..2, "D")], None, cx);
4457 });
4458
4459 // after editing again, the buffer is dirty, and emits another dirty event.
4460 buffer1.update(cx, |buffer, cx| {
4461 assert!(buffer.text() == "aBDc");
4462 assert!(buffer.is_dirty());
4463 assert_eq!(
4464 *events.lock(),
4465 &[
4466 language::BufferEvent::Edited,
4467 language::BufferEvent::DirtyChanged,
4468 language::BufferEvent::Edited,
4469 ],
4470 );
4471 events.lock().clear();
4472
4473 // After restoring the buffer to its previously-saved state,
4474 // the buffer is not considered dirty anymore.
4475 buffer.edit([(1..3, "")], None, cx);
4476 assert!(buffer.text() == "ac");
4477 assert!(!buffer.is_dirty());
4478 });
4479
4480 assert_eq!(
4481 *events.lock(),
4482 &[
4483 language::BufferEvent::Edited,
4484 language::BufferEvent::DirtyChanged
4485 ]
4486 );
4487
4488 // When a file is deleted, it is not considered dirty.
4489 let events = Arc::new(Mutex::new(Vec::new()));
4490 let buffer2 = project
4491 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4492 .await
4493 .unwrap();
4494 buffer2.update(cx, |_, cx| {
4495 cx.subscribe(&buffer2, {
4496 let events = events.clone();
4497 move |_, _, event, _| match event {
4498 BufferEvent::Operation { .. } => {}
4499 _ => events.lock().push(event.clone()),
4500 }
4501 })
4502 .detach();
4503 });
4504
4505 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4506 .await
4507 .unwrap();
4508 cx.executor().run_until_parked();
4509 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4510 assert_eq!(
4511 mem::take(&mut *events.lock()),
4512 &[language::BufferEvent::FileHandleChanged]
4513 );
4514
4515 // Buffer becomes dirty when edited.
4516 buffer2.update(cx, |buffer, cx| {
4517 buffer.edit([(2..3, "")], None, cx);
4518 assert_eq!(buffer.is_dirty(), true);
4519 });
4520 assert_eq!(
4521 mem::take(&mut *events.lock()),
4522 &[
4523 language::BufferEvent::Edited,
4524 language::BufferEvent::DirtyChanged
4525 ]
4526 );
4527
4528 // Buffer becomes clean again when all of its content is removed, because
4529 // the file was deleted.
4530 buffer2.update(cx, |buffer, cx| {
4531 buffer.edit([(0..2, "")], None, cx);
4532 assert_eq!(buffer.is_empty(), true);
4533 assert_eq!(buffer.is_dirty(), false);
4534 });
4535 assert_eq!(
4536 *events.lock(),
4537 &[
4538 language::BufferEvent::Edited,
4539 language::BufferEvent::DirtyChanged
4540 ]
4541 );
4542
4543 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4544 let events = Arc::new(Mutex::new(Vec::new()));
4545 let buffer3 = project
4546 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4547 .await
4548 .unwrap();
4549 buffer3.update(cx, |_, cx| {
4550 cx.subscribe(&buffer3, {
4551 let events = events.clone();
4552 move |_, _, event, _| match event {
4553 BufferEvent::Operation { .. } => {}
4554 _ => events.lock().push(event.clone()),
4555 }
4556 })
4557 .detach();
4558 });
4559
4560 buffer3.update(cx, |buffer, cx| {
4561 buffer.edit([(0..0, "x")], None, cx);
4562 });
4563 events.lock().clear();
4564 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4565 .await
4566 .unwrap();
4567 cx.executor().run_until_parked();
4568 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4569 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4570}
4571
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its backing file changing on disk:
    // a clean buffer is reloaded in place (preserving anchors by applying a
    // diff), while a dirty buffer keeps its in-memory contents and is marked
    // as having a conflict instead.
    init_test(cx);

    // The ˇ markers record byte offsets in the initial text so we can later
    // verify that anchors survive the on-disk reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The new on-disk text carries its own markers: these are the offsets the
    // anchors are expected to land on after the diff-based reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should have tracked through the reload edits.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4654
4655#[gpui::test]
4656async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4657 init_test(cx);
4658
4659 let fs = FakeFs::new(cx.executor());
4660 fs.insert_tree(
4661 path!("/dir"),
4662 json!({
4663 "file1": "a\nb\nc\n",
4664 "file2": "one\r\ntwo\r\nthree\r\n",
4665 }),
4666 )
4667 .await;
4668
4669 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4670 let buffer1 = project
4671 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4672 .await
4673 .unwrap();
4674 let buffer2 = project
4675 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4676 .await
4677 .unwrap();
4678
4679 buffer1.update(cx, |buffer, _| {
4680 assert_eq!(buffer.text(), "a\nb\nc\n");
4681 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4682 });
4683 buffer2.update(cx, |buffer, _| {
4684 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4685 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4686 });
4687
4688 // Change a file's line endings on disk from unix to windows. The buffer's
4689 // state updates correctly.
4690 fs.save(
4691 path!("/dir/file1").as_ref(),
4692 &"aaa\nb\nc\n".into(),
4693 LineEnding::Windows,
4694 )
4695 .await
4696 .unwrap();
4697 cx.executor().run_until_parked();
4698 buffer1.update(cx, |buffer, _| {
4699 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4700 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4701 });
4702
4703 // Save a file with windows line endings. The file is written correctly.
4704 buffer2.update(cx, |buffer, cx| {
4705 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4706 });
4707 project
4708 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4709 .await
4710 .unwrap();
4711 assert_eq!(
4712 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4713 "one\r\ntwo\r\nthree\r\nfour\r\n",
4714 );
4715}
4716
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Pushes LSP diagnostics whose `related_information` links hint
    // diagnostics to their primary diagnostics, and verifies that they are
    // grouped: each primary diagnostic and its hints share a `group_id`, with
    // `is_primary` set only on the primary entry.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary warning "error 1" with a single related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary error "error 2" with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position: "error 2" and its hints share
    // group 0, "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4976
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // When a worktree entry is renamed, a language server that registered
    // file-operation capabilities should receive `workspace/willRenameFiles`
    // (whose WorkspaceEdit response is applied) followed by a
    // `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server declares interest in renames of .rs files and of any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the handlers for the resulting LSP traffic are
    // installed below while this future is in flight.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Arbitrary edit for the server to return from willRenameFiles, targeting
    // an unrelated file, so we can check the project received it verbatim.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer willRenameFiles, verifying the old/new URIs and recording that
    // the request was actually made.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5105
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end test of LSP symbol rename: `prepare_rename` resolves the
    // symbol range under the cursor, then `perform_rename` applies the
    // multi-file `WorkspaceEdit` returned by the server to the open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the handler is installed
    // while the request is in flight.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's LSP range maps back to byte offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits touching both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both buffers, each with the rename applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5245
5246#[gpui::test]
5247async fn test_search(cx: &mut gpui::TestAppContext) {
5248 init_test(cx);
5249
5250 let fs = FakeFs::new(cx.executor());
5251 fs.insert_tree(
5252 path!("/dir"),
5253 json!({
5254 "one.rs": "const ONE: usize = 1;",
5255 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5256 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5257 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5258 }),
5259 )
5260 .await;
5261 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5262 assert_eq!(
5263 search(
5264 &project,
5265 SearchQuery::text(
5266 "TWO",
5267 false,
5268 true,
5269 false,
5270 Default::default(),
5271 Default::default(),
5272 false,
5273 None
5274 )
5275 .unwrap(),
5276 cx
5277 )
5278 .await
5279 .unwrap(),
5280 HashMap::from_iter([
5281 (path!("dir/two.rs").to_string(), vec![6..9]),
5282 (path!("dir/three.rs").to_string(), vec![37..40])
5283 ])
5284 );
5285
5286 let buffer_4 = project
5287 .update(cx, |project, cx| {
5288 project.open_local_buffer(path!("/dir/four.rs"), cx)
5289 })
5290 .await
5291 .unwrap();
5292 buffer_4.update(cx, |buffer, cx| {
5293 let text = "two::TWO";
5294 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5295 });
5296
5297 assert_eq!(
5298 search(
5299 &project,
5300 SearchQuery::text(
5301 "TWO",
5302 false,
5303 true,
5304 false,
5305 Default::default(),
5306 Default::default(),
5307 false,
5308 None,
5309 )
5310 .unwrap(),
5311 cx
5312 )
5313 .await
5314 .unwrap(),
5315 HashMap::from_iter([
5316 (path!("dir/two.rs").to_string(), vec![6..9]),
5317 (path!("dir/three.rs").to_string(), vec![37..40]),
5318 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5319 ])
5320 );
5321}
5322
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the inclusion `PathMatcher` argument of `SearchQuery::text`:
    // only files matching at least one inclusion glob are searched, and
    // non-matching globs are simply ignored rather than erroring.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single inclusion glob restricting results to Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // Mix of a matching and a non-matching inclusion glob.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching inclusion globs combine additively.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5442
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the exclusion `PathMatcher` argument of `SearchQuery::text`:
    // files matching any exclusion glob are omitted from results, and
    // non-matching globs have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Exclude Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // Mix of a matching and a non-matching exclusion glob.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file yield an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5562
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion behavior as `test_search_with_exclusions`, but with an
    // additional in-memory buffer whose text matches the query and which is
    // marked non-searchable — it must never appear in any result set.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Untitled buffer containing the query text, excluded from search by id.
    let _buffer = project.update(cx, |project, cx| {
        let buffer = project.create_local_buffer("file", None, cx);
        project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
        buffer
    });

    // Exclusion glob that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Exclude Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // Mix of a matching and a non-matching exclusion glob.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file yield an empty result set — including
    // no hit from the non-searchable buffer.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5688
5689#[gpui::test]
5690async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5691 init_test(cx);
5692
5693 let search_query = "file";
5694
5695 let fs = FakeFs::new(cx.executor());
5696 fs.insert_tree(
5697 path!("/dir"),
5698 json!({
5699 "one.rs": r#"// Rust file one"#,
5700 "one.ts": r#"// TypeScript file one"#,
5701 "two.rs": r#"// Rust file two"#,
5702 "two.ts": r#"// TypeScript file two"#,
5703 }),
5704 )
5705 .await;
5706 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5707
5708 assert!(
5709 search(
5710 &project,
5711 SearchQuery::text(
5712 search_query,
5713 false,
5714 true,
5715 false,
5716 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5717 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5718 false,
5719 None,
5720 )
5721 .unwrap(),
5722 cx
5723 )
5724 .await
5725 .unwrap()
5726 .is_empty(),
5727 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5728 );
5729
5730 assert!(
5731 search(
5732 &project,
5733 SearchQuery::text(
5734 search_query,
5735 false,
5736 true,
5737 false,
5738 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5739 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5740 false,
5741 None,
5742 )
5743 .unwrap(),
5744 cx
5745 )
5746 .await
5747 .unwrap()
5748 .is_empty(),
5749 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5750 );
5751
5752 assert!(
5753 search(
5754 &project,
5755 SearchQuery::text(
5756 search_query,
5757 false,
5758 true,
5759 false,
5760 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5761 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5762 false,
5763 None,
5764 )
5765 .unwrap(),
5766 cx
5767 )
5768 .await
5769 .unwrap()
5770 .is_empty(),
5771 "Non-matching inclusions and exclusions should not change that."
5772 );
5773
5774 assert_eq!(
5775 search(
5776 &project,
5777 SearchQuery::text(
5778 search_query,
5779 false,
5780 true,
5781 false,
5782 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5783 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5784 false,
5785 None,
5786 )
5787 .unwrap(),
5788 cx
5789 )
5790 .await
5791 .unwrap(),
5792 HashMap::from_iter([
5793 (path!("dir/one.ts").to_string(), vec![14..18]),
5794 (path!("dir/two.ts").to_string(), vec![14..18]),
5795 ]),
5796 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5797 );
5798}
5799
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion globs can target a single worktree by prefixing
    // the pattern with the worktree's root name, and that patterns without a
    // worktree prefix match across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Include only `worktree-a/*.rs`. The `true` flag passed here (and not in
    // the last query) presumably makes the matcher compare against
    // worktree-root-prefixed paths — TODO confirm against the
    // `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same query shape, but targeting the second worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A pattern without a worktree prefix matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5897
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the "include ignored" search flag (the 4th `SearchQuery::text`
    // argument as used here): when false, gitignored files are skipped; when
    // true they are searched, and inclusion/exclusion filters still apply to
    // them.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: the ignored `target` and `node_modules` directories are
    // not visited.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project; with the include-ignored flag set, every file is
    // searched.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion filters also apply to ignored files once those are
    // being searched.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6020
6021#[gpui::test]
6022async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6023 init_test(cx);
6024
6025 let fs = FakeFs::new(cx.executor());
6026 fs.insert_tree(
6027 path!("/dir"),
6028 json!({
6029 "one.rs": "// ПРИВЕТ? привет!",
6030 "two.rs": "// ПРИВЕТ.",
6031 "three.rs": "// привет",
6032 }),
6033 )
6034 .await;
6035 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6036
6037 let unicode_case_sensitive_query = SearchQuery::text(
6038 "привет",
6039 false,
6040 true,
6041 false,
6042 Default::default(),
6043 Default::default(),
6044 false,
6045 None,
6046 );
6047 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6048 assert_eq!(
6049 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6050 .await
6051 .unwrap(),
6052 HashMap::from_iter([
6053 (path!("dir/one.rs").to_string(), vec![17..29]),
6054 (path!("dir/three.rs").to_string(), vec![3..15]),
6055 ])
6056 );
6057
6058 let unicode_case_insensitive_query = SearchQuery::text(
6059 "привет",
6060 false,
6061 false,
6062 false,
6063 Default::default(),
6064 Default::default(),
6065 false,
6066 None,
6067 );
6068 assert_matches!(
6069 unicode_case_insensitive_query,
6070 Ok(SearchQuery::Regex { .. })
6071 );
6072 assert_eq!(
6073 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6074 .await
6075 .unwrap(),
6076 HashMap::from_iter([
6077 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6078 (path!("dir/two.rs").to_string(), vec![3..15]),
6079 (path!("dir/three.rs").to_string(), vec![3..15]),
6080 ])
6081 );
6082
6083 assert_eq!(
6084 search(
6085 &project,
6086 SearchQuery::text(
6087 "привет.",
6088 false,
6089 false,
6090 false,
6091 Default::default(),
6092 Default::default(),
6093 false,
6094 None,
6095 )
6096 .unwrap(),
6097 cx
6098 )
6099 .await
6100 .unwrap(),
6101 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6102 );
6103}
6104
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies entry creation inside a worktree:
    // - "b.." is a legal file name (trailing dots are not a parent reference),
    // - paths escaping the worktree root and paths containing a `..`
    //   component are rejected,
    // - buffers cannot be opened via `..` paths either.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // NOTE(review): the tree is inserted with raw "/one/two" paths while the
    // filesystem assertion below wraps paths in `path!()`; confirm this is
    // intentional for non-Unix targets.
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." contains dots but no ".." path *component*, so creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem contains the successfully created "b.." and nothing from
    // the rejected requests.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
6174
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies that a hover request fans out to every language server that
    // declares hover capability for the buffer, and that:
    // - `None` hover responses (ESLintServer) are dropped from the result,
    // - servers without hover capability (NoHoverCapabilitiesServer) are not
    //   queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register four fake servers for "tsx": the first three advertise hover
    // support, the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up per-server hover handlers:
    // - TypeScriptServer / TailwindServer answer with a real hover,
    // - ESLintServer answers `None`,
    // - NoHoverCapabilitiesServer panics if it is (incorrectly) queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover request first, then drive each fake server to answer it.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6329
6330#[gpui::test]
6331async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6332 init_test(cx);
6333
6334 let fs = FakeFs::new(cx.executor());
6335 fs.insert_tree(
6336 path!("/dir"),
6337 json!({
6338 "a.ts": "a",
6339 }),
6340 )
6341 .await;
6342
6343 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6344
6345 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6346 language_registry.add(typescript_lang());
6347 let mut fake_language_servers = language_registry.register_fake_lsp(
6348 "TypeScript",
6349 FakeLspAdapter {
6350 capabilities: lsp::ServerCapabilities {
6351 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6352 ..lsp::ServerCapabilities::default()
6353 },
6354 ..FakeLspAdapter::default()
6355 },
6356 );
6357
6358 let (buffer, _handle) = project
6359 .update(cx, |p, cx| {
6360 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6361 })
6362 .await
6363 .unwrap();
6364 cx.executor().run_until_parked();
6365
6366 let fake_server = fake_language_servers
6367 .next()
6368 .await
6369 .expect("failed to get the language server");
6370
6371 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6372 move |_, _| async move {
6373 Ok(Some(lsp::Hover {
6374 contents: lsp::HoverContents::Array(vec![
6375 lsp::MarkedString::String("".to_string()),
6376 lsp::MarkedString::String(" ".to_string()),
6377 lsp::MarkedString::String("\n\n\n".to_string()),
6378 ]),
6379 range: None,
6380 }))
6381 },
6382 );
6383
6384 let hover_task = project.update(cx, |project, cx| {
6385 project.hover(&buffer, Point::new(0, 0), cx)
6386 });
6387 let () = request_handled
6388 .next()
6389 .await
6390 .expect("All hover requests should have been triggered");
6391 assert_eq!(
6392 Vec::<String>::new(),
6393 hover_task
6394 .await
6395 .into_iter()
6396 .flatten()
6397 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6398 .sorted()
6399 .collect::<Vec<_>>(),
6400 "Empty hover parts should be ignored"
6401 );
6402}
6403
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that restricting `Project::code_actions` to a set of
    // `CodeActionKind`s filters the server's response: the server offers both
    // an organize-imports action and a fix-all action, but only the requested
    // organize-imports kind must be returned.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only the SOURCE_ORGANIZE_IMPORTS kind over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the matching kind survives the filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6482
6483#[gpui::test]
6484async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6485 init_test(cx);
6486
6487 let fs = FakeFs::new(cx.executor());
6488 fs.insert_tree(
6489 path!("/dir"),
6490 json!({
6491 "a.tsx": "a",
6492 }),
6493 )
6494 .await;
6495
6496 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6497
6498 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6499 language_registry.add(tsx_lang());
6500 let language_server_names = [
6501 "TypeScriptServer",
6502 "TailwindServer",
6503 "ESLintServer",
6504 "NoActionsCapabilitiesServer",
6505 ];
6506
6507 let mut language_server_rxs = [
6508 language_registry.register_fake_lsp(
6509 "tsx",
6510 FakeLspAdapter {
6511 name: language_server_names[0],
6512 capabilities: lsp::ServerCapabilities {
6513 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6514 ..lsp::ServerCapabilities::default()
6515 },
6516 ..FakeLspAdapter::default()
6517 },
6518 ),
6519 language_registry.register_fake_lsp(
6520 "tsx",
6521 FakeLspAdapter {
6522 name: language_server_names[1],
6523 capabilities: lsp::ServerCapabilities {
6524 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6525 ..lsp::ServerCapabilities::default()
6526 },
6527 ..FakeLspAdapter::default()
6528 },
6529 ),
6530 language_registry.register_fake_lsp(
6531 "tsx",
6532 FakeLspAdapter {
6533 name: language_server_names[2],
6534 capabilities: lsp::ServerCapabilities {
6535 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6536 ..lsp::ServerCapabilities::default()
6537 },
6538 ..FakeLspAdapter::default()
6539 },
6540 ),
6541 language_registry.register_fake_lsp(
6542 "tsx",
6543 FakeLspAdapter {
6544 name: language_server_names[3],
6545 capabilities: lsp::ServerCapabilities {
6546 code_action_provider: None,
6547 ..lsp::ServerCapabilities::default()
6548 },
6549 ..FakeLspAdapter::default()
6550 },
6551 ),
6552 ];
6553
6554 let (buffer, _handle) = project
6555 .update(cx, |p, cx| {
6556 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6557 })
6558 .await
6559 .unwrap();
6560 cx.executor().run_until_parked();
6561
6562 let mut servers_with_actions_requests = HashMap::default();
6563 for i in 0..language_server_names.len() {
6564 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6565 panic!(
6566 "Failed to get language server #{i} with name {}",
6567 &language_server_names[i]
6568 )
6569 });
6570 let new_server_name = new_server.server.name();
6571
6572 assert!(
6573 !servers_with_actions_requests.contains_key(&new_server_name),
6574 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6575 );
6576 match new_server_name.0.as_ref() {
6577 "TailwindServer" | "TypeScriptServer" => {
6578 servers_with_actions_requests.insert(
6579 new_server_name.clone(),
6580 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6581 move |_, _| {
6582 let name = new_server_name.clone();
6583 async move {
6584 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6585 lsp::CodeAction {
6586 title: format!("{name} code action"),
6587 ..lsp::CodeAction::default()
6588 },
6589 )]))
6590 }
6591 },
6592 ),
6593 );
6594 }
6595 "ESLintServer" => {
6596 servers_with_actions_requests.insert(
6597 new_server_name,
6598 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6599 |_, _| async move { Ok(None) },
6600 ),
6601 );
6602 }
6603 "NoActionsCapabilitiesServer" => {
6604 let _never_handled = new_server
6605 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6606 panic!(
6607 "Should not call for code actions server with no corresponding capabilities"
6608 )
6609 });
6610 }
6611 unexpected => panic!("Unexpected server name: {unexpected}"),
6612 }
6613 }
6614
6615 let code_actions_task = project.update(cx, |project, cx| {
6616 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6617 });
6618
6619 // cx.run_until_parked();
6620 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6621 |mut code_actions_request| async move {
6622 code_actions_request
6623 .next()
6624 .await
6625 .expect("All code actions requests should have been triggered")
6626 },
6627 ))
6628 .await;
6629 assert_eq!(
6630 vec!["TailwindServer code action", "TypeScriptServer code action"],
6631 code_actions_task
6632 .await
6633 .unwrap()
6634 .unwrap()
6635 .into_iter()
6636 .map(|code_action| code_action.lsp_action.title().to_owned())
6637 .sorted()
6638 .collect::<Vec<_>>(),
6639 "Should receive code actions responses from all related servers with hover capabilities"
6640 );
6641}
6642
6643#[gpui::test]
6644async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6645 init_test(cx);
6646
6647 let fs = FakeFs::new(cx.executor());
6648 fs.insert_tree(
6649 "/dir",
6650 json!({
6651 "a.rs": "let a = 1;",
6652 "b.rs": "let b = 2;",
6653 "c.rs": "let c = 2;",
6654 }),
6655 )
6656 .await;
6657
6658 let project = Project::test(
6659 fs,
6660 [
6661 "/dir/a.rs".as_ref(),
6662 "/dir/b.rs".as_ref(),
6663 "/dir/c.rs".as_ref(),
6664 ],
6665 cx,
6666 )
6667 .await;
6668
6669 // check the initial state and get the worktrees
6670 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6671 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6672 assert_eq!(worktrees.len(), 3);
6673
6674 let worktree_a = worktrees[0].read(cx);
6675 let worktree_b = worktrees[1].read(cx);
6676 let worktree_c = worktrees[2].read(cx);
6677
6678 // check they start in the right order
6679 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6680 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6681 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6682
6683 (
6684 worktrees[0].clone(),
6685 worktrees[1].clone(),
6686 worktrees[2].clone(),
6687 )
6688 });
6689
6690 // move first worktree to after the second
6691 // [a, b, c] -> [b, a, c]
6692 project
6693 .update(cx, |project, cx| {
6694 let first = worktree_a.read(cx);
6695 let second = worktree_b.read(cx);
6696 project.move_worktree(first.id(), second.id(), cx)
6697 })
6698 .expect("moving first after second");
6699
6700 // check the state after moving
6701 project.update(cx, |project, cx| {
6702 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6703 assert_eq!(worktrees.len(), 3);
6704
6705 let first = worktrees[0].read(cx);
6706 let second = worktrees[1].read(cx);
6707 let third = worktrees[2].read(cx);
6708
6709 // check they are now in the right order
6710 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6711 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6712 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6713 });
6714
6715 // move the second worktree to before the first
6716 // [b, a, c] -> [a, b, c]
6717 project
6718 .update(cx, |project, cx| {
6719 let second = worktree_a.read(cx);
6720 let first = worktree_b.read(cx);
6721 project.move_worktree(first.id(), second.id(), cx)
6722 })
6723 .expect("moving second before first");
6724
6725 // check the state after moving
6726 project.update(cx, |project, cx| {
6727 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6728 assert_eq!(worktrees.len(), 3);
6729
6730 let first = worktrees[0].read(cx);
6731 let second = worktrees[1].read(cx);
6732 let third = worktrees[2].read(cx);
6733
6734 // check they are now in the right order
6735 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6736 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6737 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6738 });
6739
6740 // move the second worktree to after the third
6741 // [a, b, c] -> [a, c, b]
6742 project
6743 .update(cx, |project, cx| {
6744 let second = worktree_b.read(cx);
6745 let third = worktree_c.read(cx);
6746 project.move_worktree(second.id(), third.id(), cx)
6747 })
6748 .expect("moving second after third");
6749
6750 // check the state after moving
6751 project.update(cx, |project, cx| {
6752 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6753 assert_eq!(worktrees.len(), 3);
6754
6755 let first = worktrees[0].read(cx);
6756 let second = worktrees[1].read(cx);
6757 let third = worktrees[2].read(cx);
6758
6759 // check they are now in the right order
6760 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6761 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6762 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6763 });
6764
6765 // move the third worktree to before the second
6766 // [a, c, b] -> [a, b, c]
6767 project
6768 .update(cx, |project, cx| {
6769 let third = worktree_c.read(cx);
6770 let second = worktree_b.read(cx);
6771 project.move_worktree(third.id(), second.id(), cx)
6772 })
6773 .expect("moving third before second");
6774
6775 // check the state after moving
6776 project.update(cx, |project, cx| {
6777 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6778 assert_eq!(worktrees.len(), 3);
6779
6780 let first = worktrees[0].read(cx);
6781 let second = worktrees[1].read(cx);
6782 let third = worktrees[2].read(cx);
6783
6784 // check they are now in the right order
6785 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6786 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6787 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6788 });
6789
6790 // move the first worktree to after the third
6791 // [a, b, c] -> [b, c, a]
6792 project
6793 .update(cx, |project, cx| {
6794 let first = worktree_a.read(cx);
6795 let third = worktree_c.read(cx);
6796 project.move_worktree(first.id(), third.id(), cx)
6797 })
6798 .expect("moving first after third");
6799
6800 // check the state after moving
6801 project.update(cx, |project, cx| {
6802 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6803 assert_eq!(worktrees.len(), 3);
6804
6805 let first = worktrees[0].read(cx);
6806 let second = worktrees[1].read(cx);
6807 let third = worktrees[2].read(cx);
6808
6809 // check they are now in the right order
6810 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6811 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6812 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6813 });
6814
6815 // move the third worktree to before the first
6816 // [b, c, a] -> [a, b, c]
6817 project
6818 .update(cx, |project, cx| {
6819 let third = worktree_a.read(cx);
6820 let first = worktree_b.read(cx);
6821 project.move_worktree(third.id(), first.id(), cx)
6822 })
6823 .expect("moving third before first");
6824
6825 // check the state after moving
6826 project.update(cx, |project, cx| {
6827 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6828 assert_eq!(worktrees.len(), 3);
6829
6830 let first = worktrees[0].read(cx);
6831 let second = worktrees[1].read(cx);
6832 let third = worktrees[2].read(cx);
6833
6834 // check they are now in the right order
6835 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6836 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6837 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6838 });
6839}
6840
6841#[gpui::test]
6842async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6843 init_test(cx);
6844
6845 let staged_contents = r#"
6846 fn main() {
6847 println!("hello world");
6848 }
6849 "#
6850 .unindent();
6851 let file_contents = r#"
6852 // print goodbye
6853 fn main() {
6854 println!("goodbye world");
6855 }
6856 "#
6857 .unindent();
6858
6859 let fs = FakeFs::new(cx.background_executor.clone());
6860 fs.insert_tree(
6861 "/dir",
6862 json!({
6863 ".git": {},
6864 "src": {
6865 "main.rs": file_contents,
6866 }
6867 }),
6868 )
6869 .await;
6870
6871 fs.set_index_for_repo(
6872 Path::new("/dir/.git"),
6873 &[("src/main.rs".into(), staged_contents)],
6874 );
6875
6876 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6877
6878 let buffer = project
6879 .update(cx, |project, cx| {
6880 project.open_local_buffer("/dir/src/main.rs", cx)
6881 })
6882 .await
6883 .unwrap();
6884 let unstaged_diff = project
6885 .update(cx, |project, cx| {
6886 project.open_unstaged_diff(buffer.clone(), cx)
6887 })
6888 .await
6889 .unwrap();
6890
6891 cx.run_until_parked();
6892 unstaged_diff.update(cx, |unstaged_diff, cx| {
6893 let snapshot = buffer.read(cx).snapshot();
6894 assert_hunks(
6895 unstaged_diff.hunks(&snapshot, cx),
6896 &snapshot,
6897 &unstaged_diff.base_text_string().unwrap(),
6898 &[
6899 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6900 (
6901 2..3,
6902 " println!(\"hello world\");\n",
6903 " println!(\"goodbye world\");\n",
6904 DiffHunkStatus::modified_none(),
6905 ),
6906 ],
6907 );
6908 });
6909
6910 let staged_contents = r#"
6911 // print goodbye
6912 fn main() {
6913 }
6914 "#
6915 .unindent();
6916
6917 fs.set_index_for_repo(
6918 Path::new("/dir/.git"),
6919 &[("src/main.rs".into(), staged_contents)],
6920 );
6921
6922 cx.run_until_parked();
6923 unstaged_diff.update(cx, |unstaged_diff, cx| {
6924 let snapshot = buffer.read(cx).snapshot();
6925 assert_hunks(
6926 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6927 &snapshot,
6928 &unstaged_diff.base_text().text(),
6929 &[(
6930 2..3,
6931 "",
6932 " println!(\"goodbye world\");\n",
6933 DiffHunkStatus::added_none(),
6934 )],
6935 );
6936 });
6937}
6938
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Tests the uncommitted diff (working copy vs. HEAD): it must track
    // changes to HEAD, distinguish staged from unstaged hunks, and show
    // deleted-but-committed files as a single deletion hunk.
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index, but not in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is not in the index, so it still has a secondary
    // (unstaged) hunk; the println change matches the index, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk keeps a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (remove it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk no longer has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7118
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Tests the staging/unstaging state machine for diff hunks:
    // - staging is reflected optimistically (SecondaryHunkRemovalPending)
    //   before the index write completes,
    // - the expected BufferDiffEvents are emitted,
    // - a failed index write rolls the hunk back to unstaged,
    // - multiple concurrent staging operations all take effect.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is marked pending until the index write lands.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Still optimistically pending — the write failure hasn't landed yet.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7458
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Tests that staging operations remain consistent when filesystem events
    // for index writes arrive late: events are paused, hunks are staged while
    // earlier events are still buffered, and the final state must show all
    // hunks staged once every event has been flushed.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It stays pending, since the FS event is buffered.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are pending; neither index write has been observed.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7652
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // delays, then verify that after quiescing, every hunk's final secondary
    // status matches the last operation applied to it.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, every 5th line modified in the buffer => 6 modified hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // `hunks` doubles as our model of the expected state: each operation
    // records the pending status we expect the real diff to converge to.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to interleave IO and recalculation.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending status should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7771
7772#[gpui::test]
7773async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7774 init_test(cx);
7775
7776 let committed_contents = r#"
7777 fn main() {
7778 println!("hello from HEAD");
7779 }
7780 "#
7781 .unindent();
7782 let file_contents = r#"
7783 fn main() {
7784 println!("hello from the working copy");
7785 }
7786 "#
7787 .unindent();
7788
7789 let fs = FakeFs::new(cx.background_executor.clone());
7790 fs.insert_tree(
7791 "/dir",
7792 json!({
7793 ".git": {},
7794 "src": {
7795 "main.rs": file_contents,
7796 }
7797 }),
7798 )
7799 .await;
7800
7801 fs.set_head_for_repo(
7802 Path::new("/dir/.git"),
7803 &[("src/main.rs".into(), committed_contents.clone())],
7804 "deadbeef",
7805 );
7806 fs.set_index_for_repo(
7807 Path::new("/dir/.git"),
7808 &[("src/main.rs".into(), committed_contents.clone())],
7809 );
7810
7811 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7812
7813 let buffer = project
7814 .update(cx, |project, cx| {
7815 project.open_local_buffer("/dir/src/main.rs", cx)
7816 })
7817 .await
7818 .unwrap();
7819 let uncommitted_diff = project
7820 .update(cx, |project, cx| {
7821 project.open_uncommitted_diff(buffer.clone(), cx)
7822 })
7823 .await
7824 .unwrap();
7825
7826 cx.run_until_parked();
7827 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7828 let snapshot = buffer.read(cx).snapshot();
7829 assert_hunks(
7830 uncommitted_diff.hunks(&snapshot, cx),
7831 &snapshot,
7832 &uncommitted_diff.base_text_string().unwrap(),
7833 &[(
7834 1..2,
7835 " println!(\"hello from HEAD\");\n",
7836 " println!(\"hello from the working copy\");\n",
7837 DiffHunkStatus {
7838 kind: DiffHunkStatusKind::Modified,
7839 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7840 },
7841 )],
7842 );
7843 });
7844}
7845
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Tests mapping of project paths to their containing git repository,
    // including nested repositories (deps/dep1 has its own .git), and that
    // the mapping is invalidated when a .git directory is removed.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo work dir, repo-relative path)).
        // c.txt is outside any repository; a.txt belongs to the nested dep repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's .git dir should drop the mapping.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7935
7936#[gpui::test]
7937async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7938 init_test(cx);
7939 let fs = FakeFs::new(cx.background_executor.clone());
7940 fs.insert_tree(
7941 path!("/root"),
7942 json!({
7943 "home": {
7944 ".git": {},
7945 "project": {
7946 "a.txt": "A"
7947 },
7948 },
7949 }),
7950 )
7951 .await;
7952 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7953
7954 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7955 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7956 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7957
7958 project
7959 .update(cx, |project, cx| project.git_scans_complete(cx))
7960 .await;
7961 tree.flush_fs_events(cx).await;
7962
7963 project.read_with(cx, |project, cx| {
7964 let containing = project
7965 .git_store()
7966 .read(cx)
7967 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7968 assert!(containing.is_none());
7969 });
7970
7971 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7972 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7973 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7974 project
7975 .update(cx, |project, cx| project.git_scans_complete(cx))
7976 .await;
7977 tree.flush_fs_events(cx).await;
7978
7979 project.read_with(cx, |project, cx| {
7980 let containing = project
7981 .git_store()
7982 .read(cx)
7983 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7984 assert_eq!(
7985 containing
7986 .unwrap()
7987 .0
7988 .read(cx)
7989 .work_directory_abs_path
7990 .as_ref(),
7991 Path::new(path!("/root/home"))
7992 );
7993 });
7994}
7995
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status test against a real git repository on the real
    // filesystem: verifies cached_status() after the initial scan, after
    // working-copy edits, and after commits and file deletions.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",    // Modified
            "b.txt": "bb",   // Added
            "c.txt": "ccc",  // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; it should appear in the status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the current state, then delete two files.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8125
// Verifies postprocessing of computed git statuses:
// - a file that exists in HEAD and the working copy but was removed from the
//   index is reported with a combined index/worktree ("DA") status, and
// - a nested git repository (`sub`) is excluded from the outer repository's
//   status entries entirely.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    // Wait until worktree fs events and the initial git scan have settled
    // before inspecting any status.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick out the outer repository (the one rooted at "project", not "sub").
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8188
// Verifies git status tracking when the opened worktree is a subfolder of the
// repository's working directory: the repository root is discovered above the
// worktree root, statuses for paths inside the worktree are observed, and
// clearing the repo's status state is reflected after a rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt starts with a (fake) status.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open the project rooted at the deepest subfolder, not at the repo root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository root is the ancestor containing .git, not the
        // worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses and confirm the change is picked up on rescan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8265
// Verifies that merge conflicts produced by a conflicted cherry-pick are
// surfaced in `repository.merge_conflicts`, and that they clear once the
// cherry-pick is (manually) completed.
//
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `#[cfg(any())]` until the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch: a.txt => "A".
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Back on main, make a conflicting change (a.txt => "b") and cherry-pick
    // the other branch's commit onto it.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The cherry-pick must be in progress (CHERRY_PICK_HEAD exists) and have
    // conflicted on a.txt before we check what the project observed.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick finished, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8348
// Verifies that rewriting .gitignore is picked up live: a previously-ignored
// file becomes tracked (and can be staged), while a previously-tracked file
// becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // Initially: .gitignore and a.xml are committed and unmodified;
    // b.txt is ignored by "*.txt".
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore flags flip: a.xml is now ignored; b.txt is tracked and
    // shows up as Added in the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8416
// Verifies that renaming a repository's working directory on disk updates
// `work_directory_abs_path` while preserving the tracked file statuses.
//
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so blocking is expected.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is never added, so untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, a is modified, b is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole working directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8497
// End-to-end test of git file-status tracking against a real repository:
// initial scan, working-copy edits, commits, resets/stash, .gitignore
// changes, and renames of files and directories.
//
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so blocking is expected.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer have a status entry.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed, so it's clean again;
        // b.txt was removed from the index, so it's untracked;
        // e.txt was modified in the working copy.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new file inside a brand-new nested directory shows up untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the ancestor directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8699
// Verifies that repositories are only discovered for visible worktrees:
// adding an invisible (single-file) worktree that lives inside another git
// repository does not add that repository to the project.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // The visible worktree is dep1, which has its own repository; dir1's
    // repository contains it but should not be picked up.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a single file inside dir1's repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Still only dep1's repository is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8761
// Verifies ignore-state and git-state bookkeeping across rescans: files under
// tracked, ancestor-ignored, and ignored directories keep the right
// (status, is_ignored) pair both on the initial scan and after creating new
// files and staging one of them.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so ignored paths are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            // An ancestor .gitignore outside the repository.
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded before asserting.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Committed and unmodified: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // The ancestor .gitignore is outside the repo, so the file is not
        // treated as ignored within it.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by the repo's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    // Two more new files that remain unstaged.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged new file shows as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8897
// Verifies discovery and refresh of linked git worktrees
// (.git file -> .git/worktrees/<name>) and submodules
// (.git file -> .git/modules/<path>): all three repositories are found, and
// git-state changes in the linked worktree and the submodule are observed.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Points back at the main .git directory.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A linked worktree: .git is a file pointing at the gitdir.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // A submodule: .git is a file pointing into .git/modules.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index contain "b", while the working copy has "B",
            // so src/b.txt becomes modified in the worktree.
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer resolves to the linked worktree's repository; use a barrier
    // to wait for any in-flight repository updates before asserting.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9047
// Verifies that two worktrees living inside the same git repository produce
// only one repository entry in the project.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling worktrees that share the same ancestor repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository entry, rooted at the shared ancestor.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9094
9095async fn search(
9096 project: &Entity<Project>,
9097 query: SearchQuery,
9098 cx: &mut gpui::TestAppContext,
9099) -> Result<HashMap<String, Vec<Range<usize>>>> {
9100 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9101 let mut results = HashMap::default();
9102 while let Ok(search_result) = search_rx.recv().await {
9103 match search_result {
9104 SearchResult::Buffer { buffer, ranges } => {
9105 results.entry(buffer).or_insert(ranges);
9106 }
9107 SearchResult::LimitReached => {}
9108 }
9109 }
9110 Ok(results
9111 .into_iter()
9112 .map(|(buffer, ranges)| {
9113 buffer.update(cx, |buffer, cx| {
9114 let path = buffer
9115 .file()
9116 .unwrap()
9117 .full_path(cx)
9118 .to_string_lossy()
9119 .to_string();
9120 let ranges = ranges
9121 .into_iter()
9122 .map(|range| range.to_offset(buffer))
9123 .collect::<Vec<_>>();
9124 (path, ranges)
9125 })
9126 })
9127 .collect())
9128}
9129
9130pub fn init_test(cx: &mut gpui::TestAppContext) {
9131 zlog::init_test();
9132
9133 cx.update(|cx| {
9134 let settings_store = SettingsStore::test(cx);
9135 cx.set_global(settings_store);
9136 release_channel::init(SemanticVersion::default(), cx);
9137 language::init(cx);
9138 Project::init_settings(cx);
9139 });
9140}
9141
9142fn json_lang() -> Arc<Language> {
9143 Arc::new(Language::new(
9144 LanguageConfig {
9145 name: "JSON".into(),
9146 matcher: LanguageMatcher {
9147 path_suffixes: vec!["json".to_string()],
9148 ..Default::default()
9149 },
9150 ..Default::default()
9151 },
9152 None,
9153 ))
9154}
9155
9156fn js_lang() -> Arc<Language> {
9157 Arc::new(Language::new(
9158 LanguageConfig {
9159 name: "JavaScript".into(),
9160 matcher: LanguageMatcher {
9161 path_suffixes: vec!["js".to_string()],
9162 ..Default::default()
9163 },
9164 ..Default::default()
9165 },
9166 None,
9167 ))
9168}
9169
9170fn rust_lang() -> Arc<Language> {
9171 Arc::new(Language::new(
9172 LanguageConfig {
9173 name: "Rust".into(),
9174 matcher: LanguageMatcher {
9175 path_suffixes: vec!["rs".to_string()],
9176 ..Default::default()
9177 },
9178 ..Default::default()
9179 },
9180 Some(tree_sitter_rust::LANGUAGE.into()),
9181 ))
9182}
9183
// Builds a fake Python language for tests: no grammar, but a toolchain lister
// that reports a `.venv` directory found in any ancestor of the queried path
// (checked against the provided FakeFs), plus a `pyproject.toml` manifest.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // A stub toolchain lister backed by the fake filesystem.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<Path>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister reports a toolchain for every `.venv` directory
            // found in any ancestor of `subroot_relative_path`.
            let ancestors = subroot_relative_path
                .ancestors()
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Returns a term which we should use in UI to refer to a toolchain.
        fn term(&self) -> SharedString {
            SharedString::new_static("virtual environment")
        }
        /// Returns the name of the manifest file for this toolchain.
        fn manifest_name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }
        // No activation script is needed for the fake toolchain.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9246
9247fn typescript_lang() -> Arc<Language> {
9248 Arc::new(Language::new(
9249 LanguageConfig {
9250 name: "TypeScript".into(),
9251 matcher: LanguageMatcher {
9252 path_suffixes: vec!["ts".to_string()],
9253 ..Default::default()
9254 },
9255 ..Default::default()
9256 },
9257 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9258 ))
9259}
9260
9261fn tsx_lang() -> Arc<Language> {
9262 Arc::new(Language::new(
9263 LanguageConfig {
9264 name: "tsx".into(),
9265 matcher: LanguageMatcher {
9266 path_suffixes: vec!["tsx".to_string()],
9267 ..Default::default()
9268 },
9269 ..Default::default()
9270 },
9271 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9272 ))
9273}
9274
9275fn get_all_tasks(
9276 project: &Entity<Project>,
9277 task_contexts: Arc<TaskContexts>,
9278 cx: &mut App,
9279) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9280 let new_tasks = project.update(cx, |project, cx| {
9281 project.task_store.update(cx, |task_store, cx| {
9282 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9283 this.used_and_current_resolved_tasks(task_contexts, cx)
9284 })
9285 })
9286 });
9287
9288 cx.background_spawn(async move {
9289 let (mut old, new) = new_tasks.await;
9290 old.extend(new);
9291 old
9292 })
9293}
9294
9295#[track_caller]
9296fn assert_entry_git_state(
9297 tree: &Worktree,
9298 repository: &Repository,
9299 path: &str,
9300 index_status: Option<StatusCode>,
9301 is_ignored: bool,
9302) {
9303 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9304 let entry = tree
9305 .entry_for_path(path)
9306 .unwrap_or_else(|| panic!("entry {path} not found"));
9307 let status = repository
9308 .status_for_path(&path.into())
9309 .map(|entry| entry.status);
9310 let expected = index_status.map(|index_status| {
9311 TrackedStatus {
9312 index_status,
9313 worktree_status: StatusCode::Unmodified,
9314 }
9315 .into()
9316 });
9317 assert_eq!(
9318 status, expected,
9319 "expected {path} to have git status: {expected:?}"
9320 );
9321 assert_eq!(
9322 entry.is_ignored, is_ignored,
9323 "expected {path} to have is_ignored: {is_ignored}"
9324 );
9325}
9326
9327#[track_caller]
9328fn git_init(path: &Path) -> git2::Repository {
9329 let mut init_opts = RepositoryInitOptions::new();
9330 init_opts.initial_head("main");
9331 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9332}
9333
9334#[track_caller]
9335fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9336 let path = path.as_ref();
9337 let mut index = repo.index().expect("Failed to get index");
9338 index.add_path(path).expect("Failed to add file");
9339 index.write().expect("Failed to write index");
9340}
9341
9342#[track_caller]
9343fn git_remove_index(path: &Path, repo: &git2::Repository) {
9344 let mut index = repo.index().expect("Failed to get index");
9345 index.remove_path(path).expect("Failed to add file");
9346 index.write().expect("Failed to write index");
9347}
9348
9349#[track_caller]
9350fn git_commit(msg: &'static str, repo: &git2::Repository) {
9351 use git2::Signature;
9352
9353 let signature = Signature::now("test", "test@zed.dev").unwrap();
9354 let oid = repo.index().unwrap().write_tree().unwrap();
9355 let tree = repo.find_tree(oid).unwrap();
9356 if let Ok(head) = repo.head() {
9357 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9358
9359 let parent_commit = parent_obj.as_commit().unwrap();
9360
9361 repo.commit(
9362 Some("HEAD"),
9363 &signature,
9364 &signature,
9365 msg,
9366 &tree,
9367 &[parent_commit],
9368 )
9369 .expect("Failed to commit with parent");
9370 } else {
9371 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9372 .expect("Failed to commit");
9373 }
9374}
9375
/// Cherry-picks `commit` onto the current HEAD with default options.
/// Currently compiled out (`cfg(any())`); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
9381
9382#[track_caller]
9383fn git_stash(repo: &mut git2::Repository) {
9384 use git2::Signature;
9385
9386 let signature = Signature::now("test", "test@zed.dev").unwrap();
9387 repo.stash_save(&signature, "N/A", None)
9388 .expect("Failed to stash");
9389}
9390
9391#[track_caller]
9392fn git_reset(offset: usize, repo: &git2::Repository) {
9393 let head = repo.head().expect("Couldn't get repo head");
9394 let object = head.peel(git2::ObjectType::Commit).unwrap();
9395 let commit = object.as_commit().unwrap();
9396 let new_head = commit
9397 .parents()
9398 .inspect(|parnet| {
9399 parnet.message();
9400 })
9401 .nth(offset)
9402 .expect("Not enough history");
9403 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9404 .expect("Could not reset");
9405}
9406
/// Creates branch `name` pointing at the current HEAD commit (no force).
/// Currently compiled out (`cfg(any())`); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste defect: this expect message previously read
    // "Failed to commit", which misdescribes the branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9417
/// Points HEAD at the reference `name` and checks it out into the working
/// tree. Currently compiled out (`cfg(any())`); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None)
        .expect("Failed to check out head");
}
9424
/// Collects the repository's status entries into a path → status map.
/// Currently compiled out (`cfg(any())`); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9434
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths:
    // it must resolve absolute paths inside any worktree to a
    // (worktree_id, relative path) pair, accept paths to files that don't
    // exist yet (as long as they fall under a worktree), and reject paths
    // outside every worktree.
    init_test(cx);

    // Two sibling project roots, each opened as its own worktree below.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id. NOTE(review): this
    // relies on `project.worktrees(cx)` yielding project1 before project2 —
    // presumably insertion order is preserved.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested file: the returned path is relative to the worktree root.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path under a worktree resolves even if the file doesn't exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}