1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::RepoPath,
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
25 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister,
26 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
27 tree_sitter_rust, tree_sitter_typescript,
28};
29use lsp::{
30 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
31 Uri, WillRenameFiles, notification::DidRenameFiles,
32};
33use parking_lot::Mutex;
34use paths::{config_dir, tasks_file};
35use postage::stream::Stream as _;
36use pretty_assertions::{assert_eq, assert_matches};
37use rand::{Rng as _, rngs::StdRng};
38use serde_json::json;
39#[cfg(not(windows))]
40use std::os;
41use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
42use task::{ResolvedTask, ShellKind, TaskContext};
43use unindent::Unindent as _;
44use util::{
45 TryFutureExt as _, assert_set_eq, maybe, path,
46 paths::PathMatcher,
47 test::{TempTree, marked_text_offsets},
48 uri,
49};
50use worktree::WorktreeModelHandle as _;
51
52#[gpui::test]
53async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
54 cx.executor().allow_parking();
55
56 let (tx, mut rx) = futures::channel::mpsc::unbounded();
57 let _thread = std::thread::spawn(move || {
58 #[cfg(not(target_os = "windows"))]
59 std::fs::metadata("/tmp").unwrap();
60 #[cfg(target_os = "windows")]
61 std::fs::metadata("C:/Windows").unwrap();
62 std::thread::sleep(Duration::from_millis(1000));
63 tx.unbounded_send(1).unwrap();
64 });
65 rx.next().await.unwrap();
66}
67
68#[gpui::test]
69async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
70 cx.executor().allow_parking();
71
72 let io_task = smol::unblock(move || {
73 println!("sleeping on thread {:?}", std::thread::current().id());
74 std::thread::sleep(Duration::from_millis(10));
75 1
76 });
77
78 let task = cx.foreground_executor().spawn(async move {
79 io_task.await;
80 });
81
82 task.await;
83}
84
85#[cfg(not(windows))]
86#[gpui::test]
87async fn test_symlinks(cx: &mut gpui::TestAppContext) {
88 init_test(cx);
89 cx.executor().allow_parking();
90
91 let dir = TempTree::new(json!({
92 "root": {
93 "apple": "",
94 "banana": {
95 "carrot": {
96 "date": "",
97 "endive": "",
98 }
99 },
100 "fennel": {
101 "grape": "",
102 }
103 }
104 }));
105
106 let root_link_path = dir.path().join("root_link");
107 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
108 os::unix::fs::symlink(
109 dir.path().join("root/fennel"),
110 dir.path().join("root/finnochio"),
111 )
112 .unwrap();
113
114 let project = Project::test(
115 Arc::new(RealFs::new(None, cx.executor())),
116 [root_link_path.as_ref()],
117 cx,
118 )
119 .await;
120
121 project.update(cx, |project, cx| {
122 let tree = project.worktrees(cx).next().unwrap().read(cx);
123 assert_eq!(tree.file_count(), 5);
124 assert_eq!(
125 tree.inode_for_path("fennel/grape"),
126 tree.inode_for_path("finnochio/grape")
127 );
128 });
129}
130
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A tree with a root `.editorconfig`, Zed project settings in `.zed/`,
    // and a nested `.editorconfig` in `b/` that overrides the root's values.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the project watches the fake copy.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree finish scanning and settings finish loading.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a path in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
229
230#[gpui::test]
231async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
232 init_test(cx);
233 cx.update(|cx| {
234 GitHostingProviderRegistry::default_global(cx);
235 git_hosting_providers::init(cx);
236 });
237
238 let fs = FakeFs::new(cx.executor());
239 let str_path = path!("/dir");
240 let path = Path::new(str_path);
241
242 fs.insert_tree(
243 path!("/dir"),
244 json!({
245 ".zed": {
246 "settings.json": r#"{
247 "git_hosting_providers": [
248 {
249 "provider": "gitlab",
250 "base_url": "https://google.com",
251 "name": "foo"
252 }
253 ]
254 }"#
255 },
256 }),
257 )
258 .await;
259
260 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
261 let (_worktree, _) =
262 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
263 cx.executor().run_until_parked();
264
265 cx.update(|cx| {
266 let provider = GitHostingProviderRegistry::global(cx);
267 assert!(
268 provider
269 .list_hosting_providers()
270 .into_iter()
271 .any(|provider| provider.name() == "foo")
272 );
273 });
274
275 fs.atomic_write(
276 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
277 "{}".into(),
278 )
279 .await
280 .unwrap();
281
282 cx.run_until_parked();
283
284 cx.update(|cx| {
285 let provider = GitHostingProviderRegistry::global(cx);
286 assert!(
287 !provider
288 .list_hosting_providers()
289 .into_iter()
290 .any(|provider| provider.name() == "foo")
291 );
292 });
293}
294
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree with settings + tasks at the root `.zed/` and an overriding
    // `.zed/` inside the nested directory `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task context pointing at the (only) worktree, with no active item.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The source kind for tasks declared in the worktree root's `.zed/`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    // Collect every resolved task, flattened to comparable tuples; while in the
    // same update, also check that the directory-local settings override the
    // root settings (tab_size 8 at the root, 2 inside b/).
    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are picked up; the nested b/.zed task sorts
    // before the root one in this listing.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    // Path separators differ per platform in the id string.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as "recently scheduled" and add a global tasks.json
    // entry to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // Re-query: the scheduled task now sorts first, and the new global task
    // appears last with its args and env resolved.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
503
504#[gpui::test]
505async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
506 init_test(cx);
507 TaskStore::init(None);
508
509 let fs = FakeFs::new(cx.executor());
510 fs.insert_tree(
511 path!("/dir"),
512 json!({
513 ".zed": {
514 "tasks.json": r#"[{
515 "label": "test worktree root",
516 "command": "echo $ZED_WORKTREE_ROOT"
517 }]"#,
518 },
519 "a": {
520 "a.rs": "fn a() {\n A\n}"
521 },
522 }),
523 )
524 .await;
525
526 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
527 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
528
529 cx.executor().run_until_parked();
530 let worktree_id = cx.update(|cx| {
531 project.update(cx, |project, cx| {
532 project.worktrees(cx).next().unwrap().read(cx).id()
533 })
534 });
535
536 let active_non_worktree_item_tasks = cx
537 .update(|cx| {
538 get_all_tasks(
539 &project,
540 Arc::new(TaskContexts {
541 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
542 active_worktree_context: None,
543 other_worktree_contexts: Vec::new(),
544 lsp_task_sources: HashMap::default(),
545 latest_selection: None,
546 }),
547 cx,
548 )
549 })
550 .await;
551 assert!(
552 active_non_worktree_item_tasks.is_empty(),
553 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
554 );
555
556 let active_worktree_tasks = cx
557 .update(|cx| {
558 get_all_tasks(
559 &project,
560 Arc::new(TaskContexts {
561 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
562 active_worktree_context: Some((worktree_id, {
563 let mut worktree_context = TaskContext::default();
564 worktree_context
565 .task_variables
566 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
567 worktree_context
568 })),
569 other_worktree_contexts: Vec::new(),
570 lsp_task_sources: HashMap::default(),
571 latest_selection: None,
572 }),
573 cx,
574 )
575 })
576 .await;
577 assert_eq!(
578 active_worktree_tasks
579 .into_iter()
580 .map(|(source_kind, task)| {
581 let resolved = task.resolved;
582 (source_kind, resolved.command.unwrap())
583 })
584 .collect::<Vec<_>>(),
585 vec![(
586 TaskSourceKind::Worktree {
587 id: worktree_id,
588 directory_in_worktree: PathBuf::from(path!(".zed")),
589 id_base: if cfg!(windows) {
590 "local worktree tasks from directory \".zed\"".into()
591 } else {
592 "local worktree tasks from directory \".zed\"".into()
593 },
594 },
595 "echo /dir".to_string(),
596 )]
597 );
598}
599
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a project root is any ancestor directory
    // containing a `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first
        // directory that contains a pyproject.toml.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<Path>> {
            for path in path.ancestors().take(depth) {
                let p = path.join("pyproject.toml");
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two independent Python subprojects, each with
    // its own pyproject.toml and .venv directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer in project-a starts the first "ty" server instance.
    language_registry.add(python_lang(fs.clone()));
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance (id 0):
    // no toolchain has been selected yet, so both subprojects share a server.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Toolchain discovery is rooted at project-b (found via pyproject.toml).
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), Path::new("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active until one is explicitly selected.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
801
802#[gpui::test]
803async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
804 init_test(cx);
805
806 let fs = FakeFs::new(cx.executor());
807 fs.insert_tree(
808 path!("/dir"),
809 json!({
810 "test.rs": "const A: i32 = 1;",
811 "test2.rs": "",
812 "Cargo.toml": "a = 1",
813 "package.json": "{\"a\": 1}",
814 }),
815 )
816 .await;
817
818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
819 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
820
821 let mut fake_rust_servers = language_registry.register_fake_lsp(
822 "Rust",
823 FakeLspAdapter {
824 name: "the-rust-language-server",
825 capabilities: lsp::ServerCapabilities {
826 completion_provider: Some(lsp::CompletionOptions {
827 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
828 ..Default::default()
829 }),
830 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
831 lsp::TextDocumentSyncOptions {
832 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
833 ..Default::default()
834 },
835 )),
836 ..Default::default()
837 },
838 ..Default::default()
839 },
840 );
841 let mut fake_json_servers = language_registry.register_fake_lsp(
842 "JSON",
843 FakeLspAdapter {
844 name: "the-json-language-server",
845 capabilities: lsp::ServerCapabilities {
846 completion_provider: Some(lsp::CompletionOptions {
847 trigger_characters: Some(vec![":".to_string()]),
848 ..Default::default()
849 }),
850 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
851 lsp::TextDocumentSyncOptions {
852 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
853 ..Default::default()
854 },
855 )),
856 ..Default::default()
857 },
858 ..Default::default()
859 },
860 );
861
862 // Open a buffer without an associated language server.
863 let (toml_buffer, _handle) = project
864 .update(cx, |project, cx| {
865 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
866 })
867 .await
868 .unwrap();
869
870 // Open a buffer with an associated language server before the language for it has been loaded.
871 let (rust_buffer, _handle2) = project
872 .update(cx, |project, cx| {
873 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
874 })
875 .await
876 .unwrap();
877 rust_buffer.update(cx, |buffer, _| {
878 assert_eq!(buffer.language().map(|l| l.name()), None);
879 });
880
881 // Now we add the languages to the project, and ensure they get assigned to all
882 // the relevant open buffers.
883 language_registry.add(json_lang());
884 language_registry.add(rust_lang());
885 cx.executor().run_until_parked();
886 rust_buffer.update(cx, |buffer, _| {
887 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
888 });
889
890 // A server is started up, and it is notified about Rust files.
891 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
892 assert_eq!(
893 fake_rust_server
894 .receive_notification::<lsp::notification::DidOpenTextDocument>()
895 .await
896 .text_document,
897 lsp::TextDocumentItem {
898 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
899 version: 0,
900 text: "const A: i32 = 1;".to_string(),
901 language_id: "rust".to_string(),
902 }
903 );
904
905 // The buffer is configured based on the language server's capabilities.
906 rust_buffer.update(cx, |buffer, _| {
907 assert_eq!(
908 buffer
909 .completion_triggers()
910 .iter()
911 .cloned()
912 .collect::<Vec<_>>(),
913 &[".".to_string(), "::".to_string()]
914 );
915 });
916 toml_buffer.update(cx, |buffer, _| {
917 assert!(buffer.completion_triggers().is_empty());
918 });
919
920 // Edit a buffer. The changes are reported to the language server.
921 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
922 assert_eq!(
923 fake_rust_server
924 .receive_notification::<lsp::notification::DidChangeTextDocument>()
925 .await
926 .text_document,
927 lsp::VersionedTextDocumentIdentifier::new(
928 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
929 1
930 )
931 );
932
933 // Open a third buffer with a different associated language server.
934 let (json_buffer, _json_handle) = project
935 .update(cx, |project, cx| {
936 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
937 })
938 .await
939 .unwrap();
940
941 // A json language server is started up and is only notified about the json buffer.
942 let mut fake_json_server = fake_json_servers.next().await.unwrap();
943 assert_eq!(
944 fake_json_server
945 .receive_notification::<lsp::notification::DidOpenTextDocument>()
946 .await
947 .text_document,
948 lsp::TextDocumentItem {
949 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
950 version: 0,
951 text: "{\"a\": 1}".to_string(),
952 language_id: "json".to_string(),
953 }
954 );
955
956 // This buffer is configured based on the second language server's
957 // capabilities.
958 json_buffer.update(cx, |buffer, _| {
959 assert_eq!(
960 buffer
961 .completion_triggers()
962 .iter()
963 .cloned()
964 .collect::<Vec<_>>(),
965 &[":".to_string()]
966 );
967 });
968
969 // When opening another buffer whose language server is already running,
970 // it is also configured based on the existing language server's capabilities.
971 let (rust_buffer2, _handle4) = project
972 .update(cx, |project, cx| {
973 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
974 })
975 .await
976 .unwrap();
977 rust_buffer2.update(cx, |buffer, _| {
978 assert_eq!(
979 buffer
980 .completion_triggers()
981 .iter()
982 .cloned()
983 .collect::<Vec<_>>(),
984 &[".".to_string(), "::".to_string()]
985 );
986 });
987
988 // Changes are reported only to servers matching the buffer's language.
989 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
990 rust_buffer2.update(cx, |buffer, cx| {
991 buffer.edit([(0..0, "let x = 1;")], None, cx)
992 });
993 assert_eq!(
994 fake_rust_server
995 .receive_notification::<lsp::notification::DidChangeTextDocument>()
996 .await
997 .text_document,
998 lsp::VersionedTextDocumentIdentifier::new(
999 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1000 1
1001 )
1002 );
1003
1004 // Save notifications are reported to all servers.
1005 project
1006 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1007 .await
1008 .unwrap();
1009 assert_eq!(
1010 fake_rust_server
1011 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1012 .await
1013 .text_document,
1014 lsp::TextDocumentIdentifier::new(
1015 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1016 )
1017 );
1018 assert_eq!(
1019 fake_json_server
1020 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1021 .await
1022 .text_document,
1023 lsp::TextDocumentIdentifier::new(
1024 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1025 )
1026 );
1027
1028 // Renames are reported only to servers matching the buffer's language.
1029 fs.rename(
1030 Path::new(path!("/dir/test2.rs")),
1031 Path::new(path!("/dir/test3.rs")),
1032 Default::default(),
1033 )
1034 .await
1035 .unwrap();
1036 assert_eq!(
1037 fake_rust_server
1038 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1039 .await
1040 .text_document,
1041 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1042 );
1043 assert_eq!(
1044 fake_rust_server
1045 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1046 .await
1047 .text_document,
1048 lsp::TextDocumentItem {
1049 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1050 version: 0,
1051 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1052 language_id: "rust".to_string(),
1053 },
1054 );
1055
1056 rust_buffer2.update(cx, |buffer, cx| {
1057 buffer.update_diagnostics(
1058 LanguageServerId(0),
1059 DiagnosticSet::from_sorted_entries(
1060 vec![DiagnosticEntry {
1061 diagnostic: Default::default(),
1062 range: Anchor::MIN..Anchor::MAX,
1063 }],
1064 &buffer.snapshot(),
1065 ),
1066 cx,
1067 );
1068 assert_eq!(
1069 buffer
1070 .snapshot()
1071 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1072 .count(),
1073 1
1074 );
1075 });
1076
1077 // When the rename changes the extension of the file, the buffer gets closed on the old
1078 // language server and gets opened on the new one.
1079 fs.rename(
1080 Path::new(path!("/dir/test3.rs")),
1081 Path::new(path!("/dir/test3.json")),
1082 Default::default(),
1083 )
1084 .await
1085 .unwrap();
1086 assert_eq!(
1087 fake_rust_server
1088 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1089 .await
1090 .text_document,
1091 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1092 );
1093 assert_eq!(
1094 fake_json_server
1095 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1096 .await
1097 .text_document,
1098 lsp::TextDocumentItem {
1099 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1100 version: 0,
1101 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1102 language_id: "json".to_string(),
1103 },
1104 );
1105
1106 // We clear the diagnostics, since the language has changed.
1107 rust_buffer2.update(cx, |buffer, _| {
1108 assert_eq!(
1109 buffer
1110 .snapshot()
1111 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1112 .count(),
1113 0
1114 );
1115 });
1116
1117 // The renamed file's version resets after changing language server.
1118 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1119 assert_eq!(
1120 fake_json_server
1121 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1122 .await
1123 .text_document,
1124 lsp::VersionedTextDocumentIdentifier::new(
1125 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1126 1
1127 )
1128 );
1129
1130 // Restart language servers
1131 project.update(cx, |project, cx| {
1132 project.restart_language_servers_for_buffers(
1133 vec![rust_buffer.clone(), json_buffer.clone()],
1134 HashSet::default(),
1135 cx,
1136 );
1137 });
1138
1139 let mut rust_shutdown_requests = fake_rust_server
1140 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1141 let mut json_shutdown_requests = fake_json_server
1142 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1143 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1144
1145 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1146 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1147
1148 // Ensure rust document is reopened in new rust language server
1149 assert_eq!(
1150 fake_rust_server
1151 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1152 .await
1153 .text_document,
1154 lsp::TextDocumentItem {
1155 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1156 version: 0,
1157 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1158 language_id: "rust".to_string(),
1159 }
1160 );
1161
1162 // Ensure json documents are reopened in new json language server
1163 assert_set_eq!(
1164 [
1165 fake_json_server
1166 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1167 .await
1168 .text_document,
1169 fake_json_server
1170 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1171 .await
1172 .text_document,
1173 ],
1174 [
1175 lsp::TextDocumentItem {
1176 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1177 version: 0,
1178 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1179 language_id: "json".to_string(),
1180 },
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1183 version: 0,
1184 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 }
1187 ]
1188 );
1189
1190 // Close notifications are reported only to servers matching the buffer's language.
1191 cx.update(|_| drop(_json_handle));
1192 let close_message = lsp::DidCloseTextDocumentParams {
1193 text_document: lsp::TextDocumentIdentifier::new(
1194 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1195 ),
1196 };
1197 assert_eq!(
1198 fake_json_server
1199 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1200 .await,
1201 close_message,
1202 );
1203}
1204
1205#[gpui::test]
1206async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1207 init_test(cx);
1208
1209 let fs = FakeFs::new(cx.executor());
1210 fs.insert_tree(
1211 path!("/the-root"),
1212 json!({
1213 ".gitignore": "target\n",
1214 "Cargo.lock": "",
1215 "src": {
1216 "a.rs": "",
1217 "b.rs": "",
1218 },
1219 "target": {
1220 "x": {
1221 "out": {
1222 "x.rs": ""
1223 }
1224 },
1225 "y": {
1226 "out": {
1227 "y.rs": "",
1228 }
1229 },
1230 "z": {
1231 "out": {
1232 "z.rs": ""
1233 }
1234 }
1235 }
1236 }),
1237 )
1238 .await;
1239 fs.insert_tree(
1240 path!("/the-registry"),
1241 json!({
1242 "dep1": {
1243 "src": {
1244 "dep1.rs": "",
1245 }
1246 },
1247 "dep2": {
1248 "src": {
1249 "dep2.rs": "",
1250 }
1251 },
1252 }),
1253 )
1254 .await;
1255 fs.insert_tree(
1256 path!("/the/stdlib"),
1257 json!({
1258 "LICENSE": "",
1259 "src": {
1260 "string.rs": "",
1261 }
1262 }),
1263 )
1264 .await;
1265
1266 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1267 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1268 (project.languages().clone(), project.lsp_store())
1269 });
1270 language_registry.add(rust_lang());
1271 let mut fake_servers = language_registry.register_fake_lsp(
1272 "Rust",
1273 FakeLspAdapter {
1274 name: "the-language-server",
1275 ..Default::default()
1276 },
1277 );
1278
1279 cx.executor().run_until_parked();
1280
1281 // Start the language server by opening a buffer with a compatible file extension.
1282 project
1283 .update(cx, |project, cx| {
1284 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1285 })
1286 .await
1287 .unwrap();
1288
1289 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1290 project.update(cx, |project, cx| {
1291 let worktree = project.worktrees(cx).next().unwrap();
1292 assert_eq!(
1293 worktree
1294 .read(cx)
1295 .snapshot()
1296 .entries(true, 0)
1297 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1298 .collect::<Vec<_>>(),
1299 &[
1300 (Path::new(""), false),
1301 (Path::new(".gitignore"), false),
1302 (Path::new("Cargo.lock"), false),
1303 (Path::new("src"), false),
1304 (Path::new("src/a.rs"), false),
1305 (Path::new("src/b.rs"), false),
1306 (Path::new("target"), true),
1307 ]
1308 );
1309 });
1310
1311 let prev_read_dir_count = fs.read_dir_call_count();
1312
1313 let fake_server = fake_servers.next().await.unwrap();
1314 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1315 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1316 id
1317 });
1318
1319 // Simulate jumping to a definition in a dependency outside of the worktree.
1320 let _out_of_worktree_buffer = project
1321 .update(cx, |project, cx| {
1322 project.open_local_buffer_via_lsp(
1323 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1324 server_id,
1325 cx,
1326 )
1327 })
1328 .await
1329 .unwrap();
1330
1331 // Keep track of the FS events reported to the language server.
1332 let file_changes = Arc::new(Mutex::new(Vec::new()));
1333 fake_server
1334 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1335 registrations: vec![lsp::Registration {
1336 id: Default::default(),
1337 method: "workspace/didChangeWatchedFiles".to_string(),
1338 register_options: serde_json::to_value(
1339 lsp::DidChangeWatchedFilesRegistrationOptions {
1340 watchers: vec![
1341 lsp::FileSystemWatcher {
1342 glob_pattern: lsp::GlobPattern::String(
1343 path!("/the-root/Cargo.toml").to_string(),
1344 ),
1345 kind: None,
1346 },
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/src/*.{rs,c}").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/target/y/**/*.rs").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the/stdlib/src/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("**/Cargo.lock").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 ],
1372 },
1373 )
1374 .ok(),
1375 }],
1376 })
1377 .await
1378 .into_response()
1379 .unwrap();
1380 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1381 let file_changes = file_changes.clone();
1382 move |params, _| {
1383 let mut file_changes = file_changes.lock();
1384 file_changes.extend(params.changes);
1385 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1386 }
1387 });
1388
1389 cx.executor().run_until_parked();
1390 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1391 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1392
1393 let mut new_watched_paths = fs.watched_paths();
1394 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1395 assert_eq!(
1396 &new_watched_paths,
1397 &[
1398 Path::new(path!("/the-root")),
1399 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1400 Path::new(path!("/the/stdlib/src"))
1401 ]
1402 );
1403
1404 // Now the language server has asked us to watch an ignored directory path,
1405 // so we recursively load it.
1406 project.update(cx, |project, cx| {
1407 let worktree = project.visible_worktrees(cx).next().unwrap();
1408 assert_eq!(
1409 worktree
1410 .read(cx)
1411 .snapshot()
1412 .entries(true, 0)
1413 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1414 .collect::<Vec<_>>(),
1415 &[
1416 (Path::new(""), false),
1417 (Path::new(".gitignore"), false),
1418 (Path::new("Cargo.lock"), false),
1419 (Path::new("src"), false),
1420 (Path::new("src/a.rs"), false),
1421 (Path::new("src/b.rs"), false),
1422 (Path::new("target"), true),
1423 (Path::new("target/x"), true),
1424 (Path::new("target/y"), true),
1425 (Path::new("target/y/out"), true),
1426 (Path::new("target/y/out/y.rs"), true),
1427 (Path::new("target/z"), true),
1428 ]
1429 );
1430 });
1431
1432 // Perform some file system mutations, two of which match the watched patterns,
1433 // and one of which does not.
1434 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1435 .await
1436 .unwrap();
1437 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1438 .await
1439 .unwrap();
1440 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1441 .await
1442 .unwrap();
1443 fs.create_file(
1444 path!("/the-root/target/x/out/x2.rs").as_ref(),
1445 Default::default(),
1446 )
1447 .await
1448 .unwrap();
1449 fs.create_file(
1450 path!("/the-root/target/y/out/y2.rs").as_ref(),
1451 Default::default(),
1452 )
1453 .await
1454 .unwrap();
1455 fs.save(
1456 path!("/the-root/Cargo.lock").as_ref(),
1457 &"".into(),
1458 Default::default(),
1459 )
1460 .await
1461 .unwrap();
1462 fs.save(
1463 path!("/the-stdlib/LICENSE").as_ref(),
1464 &"".into(),
1465 Default::default(),
1466 )
1467 .await
1468 .unwrap();
1469 fs.save(
1470 path!("/the/stdlib/src/string.rs").as_ref(),
1471 &"".into(),
1472 Default::default(),
1473 )
1474 .await
1475 .unwrap();
1476
1477 // The language server receives events for the FS mutations that match its watch patterns.
1478 cx.executor().run_until_parked();
1479 assert_eq!(
1480 &*file_changes.lock(),
1481 &[
1482 lsp::FileEvent {
1483 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1484 typ: lsp::FileChangeType::CHANGED,
1485 },
1486 lsp::FileEvent {
1487 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1488 typ: lsp::FileChangeType::DELETED,
1489 },
1490 lsp::FileEvent {
1491 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1492 typ: lsp::FileChangeType::CREATED,
1493 },
1494 lsp::FileEvent {
1495 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1496 typ: lsp::FileChangeType::CREATED,
1497 },
1498 lsp::FileEvent {
1499 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1500 typ: lsp::FileChangeType::CHANGED,
1501 },
1502 ]
1503 );
1504}
1505
/// Diagnostics published per-file are routed to the correct buffer even when
/// each buffer belongs to its own single-file worktree.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees, one per file.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic into each file, with differing severities so the
    // assertions below can tell them apart.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should carry exactly the diagnostic addressed to its URI.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1611
1612#[gpui::test]
1613async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1614 init_test(cx);
1615
1616 let fs = FakeFs::new(cx.executor());
1617 fs.insert_tree(
1618 path!("/root"),
1619 json!({
1620 "dir": {
1621 ".git": {
1622 "HEAD": "ref: refs/heads/main",
1623 },
1624 ".gitignore": "b.rs",
1625 "a.rs": "let a = 1;",
1626 "b.rs": "let b = 2;",
1627 },
1628 "other.rs": "let b = c;"
1629 }),
1630 )
1631 .await;
1632
1633 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1634 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1635 let (worktree, _) = project
1636 .update(cx, |project, cx| {
1637 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1638 })
1639 .await
1640 .unwrap();
1641 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1642
1643 let (worktree, _) = project
1644 .update(cx, |project, cx| {
1645 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1646 })
1647 .await
1648 .unwrap();
1649 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1650
1651 let server_id = LanguageServerId(0);
1652 lsp_store.update(cx, |lsp_store, cx| {
1653 lsp_store
1654 .update_diagnostics(
1655 server_id,
1656 lsp::PublishDiagnosticsParams {
1657 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1658 version: None,
1659 diagnostics: vec![lsp::Diagnostic {
1660 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1661 severity: Some(lsp::DiagnosticSeverity::ERROR),
1662 message: "unused variable 'b'".to_string(),
1663 ..Default::default()
1664 }],
1665 },
1666 None,
1667 DiagnosticSourceKind::Pushed,
1668 &[],
1669 cx,
1670 )
1671 .unwrap();
1672 lsp_store
1673 .update_diagnostics(
1674 server_id,
1675 lsp::PublishDiagnosticsParams {
1676 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1677 version: None,
1678 diagnostics: vec![lsp::Diagnostic {
1679 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1680 severity: Some(lsp::DiagnosticSeverity::ERROR),
1681 message: "unknown variable 'c'".to_string(),
1682 ..Default::default()
1683 }],
1684 },
1685 None,
1686 DiagnosticSourceKind::Pushed,
1687 &[],
1688 cx,
1689 )
1690 .unwrap();
1691 });
1692
1693 let main_ignored_buffer = project
1694 .update(cx, |project, cx| {
1695 project.open_buffer((main_worktree_id, "b.rs"), cx)
1696 })
1697 .await
1698 .unwrap();
1699 main_ignored_buffer.update(cx, |buffer, _| {
1700 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1701 assert_eq!(
1702 chunks
1703 .iter()
1704 .map(|(s, d)| (s.as_str(), *d))
1705 .collect::<Vec<_>>(),
1706 &[
1707 ("let ", None),
1708 ("b", Some(DiagnosticSeverity::ERROR)),
1709 (" = 2;", None),
1710 ],
1711 "Gigitnored buffers should still get in-buffer diagnostics",
1712 );
1713 });
1714 let other_buffer = project
1715 .update(cx, |project, cx| {
1716 project.open_buffer((other_worktree_id, ""), cx)
1717 })
1718 .await
1719 .unwrap();
1720 other_buffer.update(cx, |buffer, _| {
1721 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1722 assert_eq!(
1723 chunks
1724 .iter()
1725 .map(|(s, d)| (s.as_str(), *d))
1726 .collect::<Vec<_>>(),
1727 &[
1728 ("let b = ", None),
1729 ("c", Some(DiagnosticSeverity::ERROR)),
1730 (";", None),
1731 ],
1732 "Buffers from hidden projects should still get in-buffer diagnostics"
1733 );
1734 });
1735
1736 project.update(cx, |project, cx| {
1737 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1738 assert_eq!(
1739 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1740 vec![(
1741 ProjectPath {
1742 worktree_id: main_worktree_id,
1743 path: Arc::from(Path::new("b.rs")),
1744 },
1745 server_id,
1746 DiagnosticSummary {
1747 error_count: 1,
1748 warning_count: 0,
1749 }
1750 )]
1751 );
1752 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1753 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1754 });
1755}
1756
/// A server that declares a disk-based diagnostics progress token should
/// surface `DiskBasedDiagnosticsStarted`/`Finished` project events bracketing
/// its `DiagnosticsUpdated` events, and publishing empty diagnostics twice in
/// a row must only produce a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token maps to a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while the progress is running produces a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending the progress maps to DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed file shows the published diagnostic in the buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // The second identical (empty) publish produces no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1893
/// Restarting a language server while its disk-based diagnostics are still in
/// progress must not leave the project stuck in a "diagnostics running" state:
/// the new server's progress lifecycle fully replaces the old one's.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The restart first removes the old server (id 0), then adds the new one (id 1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1994
/// Diagnostics published by a language server must be cleared — both from the
/// buffer and from the project summary — when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2075
2076#[gpui::test]
2077async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2078 init_test(cx);
2079
2080 let fs = FakeFs::new(cx.executor());
2081 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2082
2083 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2084 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2085
2086 language_registry.add(rust_lang());
2087 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2088
2089 let (buffer, _handle) = project
2090 .update(cx, |project, cx| {
2091 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2092 })
2093 .await
2094 .unwrap();
2095
2096 // Before restarting the server, report diagnostics with an unknown buffer version.
2097 let fake_server = fake_servers.next().await.unwrap();
2098 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2099 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2100 version: Some(10000),
2101 diagnostics: Vec::new(),
2102 });
2103 cx.executor().run_until_parked();
2104 project.update(cx, |project, cx| {
2105 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2106 });
2107
2108 let mut fake_server = fake_servers.next().await.unwrap();
2109 let notification = fake_server
2110 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2111 .await
2112 .text_document;
2113 assert_eq!(notification.version, 0);
2114}
2115
2116#[gpui::test]
2117async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2118 init_test(cx);
2119
2120 let progress_token = "the-progress-token";
2121
2122 let fs = FakeFs::new(cx.executor());
2123 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2124
2125 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2126
2127 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2128 language_registry.add(rust_lang());
2129 let mut fake_servers = language_registry.register_fake_lsp(
2130 "Rust",
2131 FakeLspAdapter {
2132 name: "the-language-server",
2133 disk_based_diagnostics_sources: vec!["disk".into()],
2134 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2135 ..Default::default()
2136 },
2137 );
2138
2139 let (buffer, _handle) = project
2140 .update(cx, |project, cx| {
2141 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2142 })
2143 .await
2144 .unwrap();
2145
2146 // Simulate diagnostics starting to update.
2147 let mut fake_server = fake_servers.next().await.unwrap();
2148 fake_server
2149 .start_progress_with(
2150 "another-token",
2151 lsp::WorkDoneProgressBegin {
2152 cancellable: Some(false),
2153 ..Default::default()
2154 },
2155 )
2156 .await;
2157 fake_server
2158 .start_progress_with(
2159 progress_token,
2160 lsp::WorkDoneProgressBegin {
2161 cancellable: Some(true),
2162 ..Default::default()
2163 },
2164 )
2165 .await;
2166 cx.executor().run_until_parked();
2167
2168 project.update(cx, |project, cx| {
2169 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2170 });
2171
2172 let cancel_notification = fake_server
2173 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2174 .await;
2175 assert_eq!(
2176 cancel_notification.token,
2177 NumberOrString::String(progress_token.into())
2178 );
2179}
2180
// Ensures that toggling the `enable_language_server` language setting stops and
// restarts the corresponding language server, without affecting servers
// registered for other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a separate fake server per language so each can be observed
    // starting and stopping independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language should start that language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server should receive a didOpen notification only for the buffer of
    // its own language.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The disabled Rust server is told to exit.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2298
// Verifies that diagnostics published by a language server against an older
// buffer version are translated through the edits made since that version,
// that overlapping diagnostics are highlighted correctly, and that publishes
// whose ranges arrive out of order are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Diagnostics with source "disk" will be marked `is_disk_based`.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        // NOTE: group ids appear to be assigned in ingestion
                        // order — these expected values were derived from
                        // observed behavior.
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2590
2591#[gpui::test]
2592async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2593 init_test(cx);
2594
2595 let text = concat!(
2596 "let one = ;\n", //
2597 "let two = \n",
2598 "let three = 3;\n",
2599 );
2600
2601 let fs = FakeFs::new(cx.executor());
2602 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2603
2604 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2605 let buffer = project
2606 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2607 .await
2608 .unwrap();
2609
2610 project.update(cx, |project, cx| {
2611 project.lsp_store.update(cx, |lsp_store, cx| {
2612 lsp_store
2613 .update_diagnostic_entries(
2614 LanguageServerId(0),
2615 PathBuf::from("/dir/a.rs"),
2616 None,
2617 None,
2618 vec![
2619 DiagnosticEntry {
2620 range: Unclipped(PointUtf16::new(0, 10))
2621 ..Unclipped(PointUtf16::new(0, 10)),
2622 diagnostic: Diagnostic {
2623 severity: DiagnosticSeverity::ERROR,
2624 message: "syntax error 1".to_string(),
2625 source_kind: DiagnosticSourceKind::Pushed,
2626 ..Diagnostic::default()
2627 },
2628 },
2629 DiagnosticEntry {
2630 range: Unclipped(PointUtf16::new(1, 10))
2631 ..Unclipped(PointUtf16::new(1, 10)),
2632 diagnostic: Diagnostic {
2633 severity: DiagnosticSeverity::ERROR,
2634 message: "syntax error 2".to_string(),
2635 source_kind: DiagnosticSourceKind::Pushed,
2636 ..Diagnostic::default()
2637 },
2638 },
2639 ],
2640 cx,
2641 )
2642 .unwrap();
2643 })
2644 });
2645
2646 // An empty range is extended forward to include the following character.
2647 // At the end of a line, an empty range is extended backward to include
2648 // the preceding character.
2649 buffer.update(cx, |buffer, _| {
2650 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2651 assert_eq!(
2652 chunks
2653 .iter()
2654 .map(|(s, d)| (s.as_str(), *d))
2655 .collect::<Vec<_>>(),
2656 &[
2657 ("let one = ", None),
2658 (";", Some(DiagnosticSeverity::ERROR)),
2659 ("\nlet two =", None),
2660 (" ", Some(DiagnosticSeverity::ERROR)),
2661 ("\nlet three = 3;\n", None)
2662 ]
2663 );
2664 });
2665}
2666
2667#[gpui::test]
2668async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2669 init_test(cx);
2670
2671 let fs = FakeFs::new(cx.executor());
2672 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2673 .await;
2674
2675 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2676 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2677
2678 lsp_store.update(cx, |lsp_store, cx| {
2679 lsp_store
2680 .update_diagnostic_entries(
2681 LanguageServerId(0),
2682 Path::new("/dir/a.rs").to_owned(),
2683 None,
2684 None,
2685 vec![DiagnosticEntry {
2686 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2687 diagnostic: Diagnostic {
2688 severity: DiagnosticSeverity::ERROR,
2689 is_primary: true,
2690 message: "syntax error a1".to_string(),
2691 source_kind: DiagnosticSourceKind::Pushed,
2692 ..Diagnostic::default()
2693 },
2694 }],
2695 cx,
2696 )
2697 .unwrap();
2698 lsp_store
2699 .update_diagnostic_entries(
2700 LanguageServerId(1),
2701 Path::new("/dir/a.rs").to_owned(),
2702 None,
2703 None,
2704 vec![DiagnosticEntry {
2705 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2706 diagnostic: Diagnostic {
2707 severity: DiagnosticSeverity::ERROR,
2708 is_primary: true,
2709 message: "syntax error b1".to_string(),
2710 source_kind: DiagnosticSourceKind::Pushed,
2711 ..Diagnostic::default()
2712 },
2713 }],
2714 cx,
2715 )
2716 .unwrap();
2717
2718 assert_eq!(
2719 lsp_store.diagnostic_summary(false, cx),
2720 DiagnosticSummary {
2721 error_count: 2,
2722 warning_count: 0,
2723 }
2724 );
2725 });
2726}
2727
// Verifies that edits a language server computed against an older document
// version are translated through the edits the user made since that version
// before being applied to the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve LSP edits whose coordinates refer to the old document version;
    // they must land at the corresponding places in the edited buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2882
// Verifies that a large LSP diff which nets out to a small change is minimized
// into precise edits rather than replacing the whole file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff above should be reduced to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2993
2994#[gpui::test]
2995async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2996 cx: &mut gpui::TestAppContext,
2997) {
2998 init_test(cx);
2999
3000 let text = "Path()";
3001
3002 let fs = FakeFs::new(cx.executor());
3003 fs.insert_tree(
3004 path!("/dir"),
3005 json!({
3006 "a.rs": text
3007 }),
3008 )
3009 .await;
3010
3011 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3012 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3013 let buffer = project
3014 .update(cx, |project, cx| {
3015 project.open_local_buffer(path!("/dir/a.rs"), cx)
3016 })
3017 .await
3018 .unwrap();
3019
3020 // Simulate the language server sending us a pair of edits at the same location,
3021 // with an insertion following a replacement (which violates the LSP spec).
3022 let edits = lsp_store
3023 .update(cx, |lsp_store, cx| {
3024 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3025 &buffer,
3026 [
3027 lsp::TextEdit {
3028 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3029 new_text: "Path".into(),
3030 },
3031 lsp::TextEdit {
3032 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3033 new_text: "from path import Path\n\n\n".into(),
3034 },
3035 ],
3036 LanguageServerId(0),
3037 None,
3038 cx,
3039 )
3040 })
3041 .await
3042 .unwrap();
3043
3044 buffer.update(cx, |buffer, cx| {
3045 buffer.edit(edits, None, cx);
3046 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3047 });
3048}
3049
// Verifies that malformed server edits — unordered, with inverted ranges, or
// with ranges extending past the end of the document — are normalized into
// valid, minimal buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start comes after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The invalid edits should be normalized into two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3156
3157fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3158 buffer: &Buffer,
3159 range: Range<T>,
3160) -> Vec<(String, Option<DiagnosticSeverity>)> {
3161 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3162 for chunk in buffer.snapshot().chunks(range, true) {
3163 if chunks
3164 .last()
3165 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3166 {
3167 chunks.last_mut().unwrap().0.push_str(chunk.text);
3168 } else {
3169 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3170 }
3171 }
3172 chunks
3173}
3174
// Verifies go-to-definition across files: the target buffer is opened in an
// invisible worktree that is released once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs sits outside the visible worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point the definition at a.rs, a file outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs appears as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3273
// Verifies that when a completion item carries a `textEdit`, the edit's range
// and text take precedence over `insertText` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with a single item whose text_edit replaces the trailing "fqn".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3356
// Verifies that completion items lacking their own `text_edit` fall back to the
// completion list's default `edit_range` (LSP `CompletionList.itemDefaults`):
// - Test 1: with `insert_text` present, the inserted text comes from `insert_text`;
// - Test 2: with both `text_edit` and `insert_text` absent, the `label` is inserted.
// In both cases the replace range must be the server-provided default edit range.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake TypeScript server whose completion responses are scripted below.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the completion request first; the handler installed below
        // services exactly one request (`.next().await`).
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the last 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` takes precedence over the label; the replace range
        // comes from the list defaults.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is the inserted text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3492
// Verifies the replace-range fallback when the server provides neither a
// per-item `text_edit` nor a list-level default `edit_range`:
// - Test 1: `insert_text` is used, and the range is inferred from the word
//   adjacent to the cursor ("fqn" — 3 chars).
// - Test 2: only a `label` is given; it is inserted over the inferred word
//   ("cmp" — 3 chars inside the string literal, cursor before the close quote).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request before installing the one-shot handler below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // Replace range spans the word before the cursor ("fqn").
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Replace range covers "cmp" (the word segment before the cursor).
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3598
// Verifies that carriage returns in server-provided completion text are
// normalized: both `\r` and `\r\n` in `insert_text` become `\n` in the
// resulting completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed `\r` and `\r\n` line endings — both must normalize to `\n`.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3666
// Exercises the full command-backed code-action flow:
// 1. the server returns a code action with `data` but no edits;
// 2. resolving the action populates a `command` (still no edits);
// 3. applying the action therefore executes the command, during which the
//    server sends a `workspace/applyEdit` request back to the client;
// 4. the resulting project transaction contains those server-driven edits,
//    and they are undoable on the buffer.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises resolve support and a single executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` marks the action as resolvable; the resolve
                    // handler below attaches the command based on it.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3808
// Verifies that saving a buffer writes its edited contents back to disk.
// The edit is deliberately large (~160 KB) to exercise multi-chunk writes;
// `\r\n` is normalized when comparing so the test passes on Windows too.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // On-disk contents (with line endings normalized) must match the buffer.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3844
// Regression test for issue #24349: saving an untitled buffer under a path
// with a recognized language (here `.rs`) must spawn the corresponding
// language server and send it a `textDocument/didOpen` for the new file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so no server should attach.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` path gives the buffer a language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // Now the buffer is backed by a running language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3924
3925#[gpui::test(iterations = 30)]
3926async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3927 init_test(cx);
3928
3929 let fs = FakeFs::new(cx.executor());
3930 fs.insert_tree(
3931 path!("/dir"),
3932 json!({
3933 "file1": "the original contents",
3934 }),
3935 )
3936 .await;
3937
3938 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3939 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3940 let buffer = project
3941 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3942 .await
3943 .unwrap();
3944
3945 // Simulate buffer diffs being slow, so that they don't complete before
3946 // the next file change occurs.
3947 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3948
3949 // Change the buffer's file on disk, and then wait for the file change
3950 // to be detected by the worktree, so that the buffer starts reloading.
3951 fs.save(
3952 path!("/dir/file1").as_ref(),
3953 &"the first contents".into(),
3954 Default::default(),
3955 )
3956 .await
3957 .unwrap();
3958 worktree.next_event(cx).await;
3959
3960 // Change the buffer's file again. Depending on the random seed, the
3961 // previous file change may still be in progress.
3962 fs.save(
3963 path!("/dir/file1").as_ref(),
3964 &"the second contents".into(),
3965 Default::default(),
3966 )
3967 .await
3968 .unwrap();
3969 worktree.next_event(cx).await;
3970
3971 cx.executor().run_until_parked();
3972 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3973 buffer.read_with(cx, |buffer, _| {
3974 assert_eq!(buffer.text(), on_disk_text);
3975 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3976 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3977 });
3978}
3979
3980#[gpui::test(iterations = 30)]
3981async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3982 init_test(cx);
3983
3984 let fs = FakeFs::new(cx.executor());
3985 fs.insert_tree(
3986 path!("/dir"),
3987 json!({
3988 "file1": "the original contents",
3989 }),
3990 )
3991 .await;
3992
3993 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3994 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3995 let buffer = project
3996 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3997 .await
3998 .unwrap();
3999
4000 // Simulate buffer diffs being slow, so that they don't complete before
4001 // the next file change occurs.
4002 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4003
4004 // Change the buffer's file on disk, and then wait for the file change
4005 // to be detected by the worktree, so that the buffer starts reloading.
4006 fs.save(
4007 path!("/dir/file1").as_ref(),
4008 &"the first contents".into(),
4009 Default::default(),
4010 )
4011 .await
4012 .unwrap();
4013 worktree.next_event(cx).await;
4014
4015 cx.executor()
4016 .spawn(cx.executor().simulate_random_delay())
4017 .await;
4018
4019 // Perform a noop edit, causing the buffer's version to increase.
4020 buffer.update(cx, |buffer, cx| {
4021 buffer.edit([(0..0, " ")], None, cx);
4022 buffer.undo(cx);
4023 });
4024
4025 cx.executor().run_until_parked();
4026 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4027 buffer.read_with(cx, |buffer, _| {
4028 let buffer_text = buffer.text();
4029 if buffer_text == on_disk_text {
4030 assert!(
4031 !buffer.is_dirty() && !buffer.has_conflict(),
4032 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4033 );
4034 }
4035 // If the file change occurred while the buffer was processing the first
4036 // change, the buffer will be in a conflicting state.
4037 else {
4038 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4039 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4040 }
4041 });
4042}
4043
// Same as `test_save_file`, but the worktree root IS the file itself
// (a single-file worktree) rather than its parent directory — saving must
// still write the edited contents to disk.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // Note: the worktree root is the file, not "/dir".
    let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Compare with `\r\n` normalized so the check is platform-independent.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
4078
// Verifies "save as" on an untitled buffer: the contents land on disk, the
// buffer becomes clean, its file handle points at the new path, its language
// is re-detected from the new extension (Plain Text -> Rust), and re-opening
// the same path yields the very same buffer entity (no duplicate).
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Untitled buffers start out as Plain Text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Language is re-detected from the `.rs` extension.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the existing buffer, not a new one.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4132
// Uses a real filesystem (TempTree + RealFs) to verify that, after a batch of
// renames and deletions:
// - worktree entries keep stable ids across renames;
// - open buffers track their files' new paths, and a deleted file's buffer
//   reports `DiskState::Deleted`;
// - a remote replica of the worktree, fed the observed update stream,
//   converges to the same set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the move of parent dir "b/c" -> "d").
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file's buffer keeps its last known path...
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        // ...but its disk state reflects the deletion.
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4298
// Verifies that renaming a directory via `Project::rename_entry` preserves
// worktree entry ids for both the directory and the file inside it, and that
// an open buffer for that file stays clean across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename directory "a" -> "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Ids are stable across the rename, and the buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4350
// Verifies that opening the same path multiple times — concurrently or
// sequentially, even after dropping one handle — always yields the same
// buffer entity (one buffer per path).
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.entity_id();
    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);

    // Open the same path again while it is still open.
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
}
4396
4397#[gpui::test]
4398async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4399 init_test(cx);
4400
4401 let fs = FakeFs::new(cx.executor());
4402 fs.insert_tree(
4403 path!("/dir"),
4404 json!({
4405 "file1": "abc",
4406 "file2": "def",
4407 "file3": "ghi",
4408 }),
4409 )
4410 .await;
4411
4412 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4413
4414 let buffer1 = project
4415 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4416 .await
4417 .unwrap();
4418 let events = Arc::new(Mutex::new(Vec::new()));
4419
4420 // initially, the buffer isn't dirty.
4421 buffer1.update(cx, |buffer, cx| {
4422 cx.subscribe(&buffer1, {
4423 let events = events.clone();
4424 move |_, _, event, _| match event {
4425 BufferEvent::Operation { .. } => {}
4426 _ => events.lock().push(event.clone()),
4427 }
4428 })
4429 .detach();
4430
4431 assert!(!buffer.is_dirty());
4432 assert!(events.lock().is_empty());
4433
4434 buffer.edit([(1..2, "")], None, cx);
4435 });
4436
4437 // after the first edit, the buffer is dirty, and emits a dirtied event.
4438 buffer1.update(cx, |buffer, cx| {
4439 assert!(buffer.text() == "ac");
4440 assert!(buffer.is_dirty());
4441 assert_eq!(
4442 *events.lock(),
4443 &[
4444 language::BufferEvent::Edited,
4445 language::BufferEvent::DirtyChanged
4446 ]
4447 );
4448 events.lock().clear();
4449 buffer.did_save(
4450 buffer.version(),
4451 buffer.file().unwrap().disk_state().mtime(),
4452 cx,
4453 );
4454 });
4455
4456 // after saving, the buffer is not dirty, and emits a saved event.
4457 buffer1.update(cx, |buffer, cx| {
4458 assert!(!buffer.is_dirty());
4459 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4460 events.lock().clear();
4461
4462 buffer.edit([(1..1, "B")], None, cx);
4463 buffer.edit([(2..2, "D")], None, cx);
4464 });
4465
4466 // after editing again, the buffer is dirty, and emits another dirty event.
4467 buffer1.update(cx, |buffer, cx| {
4468 assert!(buffer.text() == "aBDc");
4469 assert!(buffer.is_dirty());
4470 assert_eq!(
4471 *events.lock(),
4472 &[
4473 language::BufferEvent::Edited,
4474 language::BufferEvent::DirtyChanged,
4475 language::BufferEvent::Edited,
4476 ],
4477 );
4478 events.lock().clear();
4479
4480 // After restoring the buffer to its previously-saved state,
4481 // the buffer is not considered dirty anymore.
4482 buffer.edit([(1..3, "")], None, cx);
4483 assert!(buffer.text() == "ac");
4484 assert!(!buffer.is_dirty());
4485 });
4486
4487 assert_eq!(
4488 *events.lock(),
4489 &[
4490 language::BufferEvent::Edited,
4491 language::BufferEvent::DirtyChanged
4492 ]
4493 );
4494
4495 // When a file is deleted, it is not considered dirty.
4496 let events = Arc::new(Mutex::new(Vec::new()));
4497 let buffer2 = project
4498 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4499 .await
4500 .unwrap();
4501 buffer2.update(cx, |_, cx| {
4502 cx.subscribe(&buffer2, {
4503 let events = events.clone();
4504 move |_, _, event, _| match event {
4505 BufferEvent::Operation { .. } => {}
4506 _ => events.lock().push(event.clone()),
4507 }
4508 })
4509 .detach();
4510 });
4511
4512 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4513 .await
4514 .unwrap();
4515 cx.executor().run_until_parked();
4516 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4517 assert_eq!(
4518 mem::take(&mut *events.lock()),
4519 &[language::BufferEvent::FileHandleChanged]
4520 );
4521
4522 // Buffer becomes dirty when edited.
4523 buffer2.update(cx, |buffer, cx| {
4524 buffer.edit([(2..3, "")], None, cx);
4525 assert_eq!(buffer.is_dirty(), true);
4526 });
4527 assert_eq!(
4528 mem::take(&mut *events.lock()),
4529 &[
4530 language::BufferEvent::Edited,
4531 language::BufferEvent::DirtyChanged
4532 ]
4533 );
4534
4535 // Buffer becomes clean again when all of its content is removed, because
4536 // the file was deleted.
4537 buffer2.update(cx, |buffer, cx| {
4538 buffer.edit([(0..2, "")], None, cx);
4539 assert_eq!(buffer.is_empty(), true);
4540 assert_eq!(buffer.is_dirty(), false);
4541 });
4542 assert_eq!(
4543 *events.lock(),
4544 &[
4545 language::BufferEvent::Edited,
4546 language::BufferEvent::DirtyChanged
4547 ]
4548 );
4549
4550 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4551 let events = Arc::new(Mutex::new(Vec::new()));
4552 let buffer3 = project
4553 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4554 .await
4555 .unwrap();
4556 buffer3.update(cx, |_, cx| {
4557 cx.subscribe(&buffer3, {
4558 let events = events.clone();
4559 move |_, _, event, _| match event {
4560 BufferEvent::Operation { .. } => {}
4561 _ => events.lock().push(event.clone()),
4562 }
4563 })
4564 .detach();
4565 });
4566
4567 buffer3.update(cx, |buffer, cx| {
4568 buffer.edit([(0..0, "x")], None, cx);
4569 });
4570 events.lock().clear();
4571 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4572 .await
4573 .unwrap();
4574 cx.executor().run_until_parked();
4575 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4576 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4577}
4578
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer is reloaded in place (with anchors remapped through the
    // diff), while a dirty buffer keeps its contents and is flagged as
    // conflicted instead.
    init_test(cx);

    // Marked text: the ˇ markers record offsets whose anchors we track
    // across the on-disk reload below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset in the initial contents.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk. The ˇ markers in the new contents give the
    // offsets where the anchors created above should land after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors survive the reload and map onto the marked offsets
        // in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4661
4662#[gpui::test]
4663async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4664 init_test(cx);
4665
4666 let fs = FakeFs::new(cx.executor());
4667 fs.insert_tree(
4668 path!("/dir"),
4669 json!({
4670 "file1": "a\nb\nc\n",
4671 "file2": "one\r\ntwo\r\nthree\r\n",
4672 }),
4673 )
4674 .await;
4675
4676 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4677 let buffer1 = project
4678 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4679 .await
4680 .unwrap();
4681 let buffer2 = project
4682 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4683 .await
4684 .unwrap();
4685
4686 buffer1.update(cx, |buffer, _| {
4687 assert_eq!(buffer.text(), "a\nb\nc\n");
4688 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4689 });
4690 buffer2.update(cx, |buffer, _| {
4691 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4692 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4693 });
4694
4695 // Change a file's line endings on disk from unix to windows. The buffer's
4696 // state updates correctly.
4697 fs.save(
4698 path!("/dir/file1").as_ref(),
4699 &"aaa\nb\nc\n".into(),
4700 LineEnding::Windows,
4701 )
4702 .await
4703 .unwrap();
4704 cx.executor().run_until_parked();
4705 buffer1.update(cx, |buffer, _| {
4706 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4707 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4708 });
4709
4710 // Save a file with windows line endings. The file is written correctly.
4711 buffer2.update(cx, |buffer, cx| {
4712 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4713 });
4714 project
4715 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4716 .await
4717 .unwrap();
4718 assert_eq!(
4719 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4720 "one\r\ntwo\r\nthree\r\nfour\r\n",
4721 );
4722}
4723
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics are grouped: a primary diagnostic
    // and its related "hint" diagnostics (linked via `related_information`)
    // share a `group_id`, with exactly one entry marked `is_primary`.
    // Two groups are constructed here:
    //   group 1: warning "error 1" + one hint
    //   group 0: error "error 2"  + two hints
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload in which primaries reference their
    // hints and each hint references back to its "original diagnostic".
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group 1: a warning with a single related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to group 1; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group 0: an error with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of group 0; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of group 0; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the payload through the store as if a server pushed it.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries, ordered by position; group membership and primary
    // flags must match the related_information links built above.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Querying group 0 yields the error plus its two hints, in order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Querying group 1 yields the warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4983
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP file-operation
    // messages to a server that registered for them: `workspace/willRenameFiles`
    // (a request whose returned WorkspaceEdit must be applied) followed by the
    // `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in rename operations on `.rs` files
    // and on any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server is started.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it must not resolve until willRenameFiles has
    // been answered by the server below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; the test only
    // checks it is received intact, not that it is applied.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Answer willRenameFiles, recording the edit we handed back so it can be
    // compared at the end.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5112
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies the symbol-rename flow: `prepare_rename` resolves the range of
    // the symbol under the cursor, and `perform_rename` applies the server's
    // WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepareRename support so prepare_rename is
                    // sent to the server rather than handled locally.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range maps back to offsets 6..9 — the span of "ONE".
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers: the one that was
    // already open (one.rs) and two.rs, which was opened to apply the edit.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5252
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search, including that unsaved buffer edits
    // are reflected in search results.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // NOTE(review): SearchQuery::text takes three positional bool flags
    // followed by include/exclude path matchers — presumably
    // (whole_word, case_sensitive, include_ignored); confirm against the
    // SearchQuery::text signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now contains two
    // occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // Search again: the unsaved in-memory edits must be found.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5329
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_include` PathMatcher (5th argument to
    // SearchQuery::text): only files matching at least one inclusion glob
    // are searched; non-matching globs are simply ignored.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single inclusion glob restricting the search to Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A mix of matching and non-matching inclusion globs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching inclusion globs combine additively.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5449
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_exclude` PathMatcher (6th argument to
    // SearchQuery::text): files matching any exclusion glob are skipped;
    // non-matching globs have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Single exclusion glob removing Rust files from the results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A mix of matching and non-matching exclusion globs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file yield no results.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5569
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion scenarios as test_search_with_exclusions, but with an
    // additional untitled (file-less) buffer containing a match; exclusion
    // globs must still apply cleanly and the untitled buffer must not
    // appear in path-keyed results.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // An in-memory buffer whose text matches the query but has no file path.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    // Exclusion glob that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Single exclusion glob removing Rust files from the results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A mix of matching and non-matching exclusion globs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file yield no results.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5693
5694#[gpui::test]
5695async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5696 init_test(cx);
5697
5698 let search_query = "file";
5699
5700 let fs = FakeFs::new(cx.executor());
5701 fs.insert_tree(
5702 path!("/dir"),
5703 json!({
5704 "one.rs": r#"// Rust file one"#,
5705 "one.ts": r#"// TypeScript file one"#,
5706 "two.rs": r#"// Rust file two"#,
5707 "two.ts": r#"// TypeScript file two"#,
5708 }),
5709 )
5710 .await;
5711 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5712
5713 assert!(
5714 search(
5715 &project,
5716 SearchQuery::text(
5717 search_query,
5718 false,
5719 true,
5720 false,
5721 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5722 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5723 false,
5724 None,
5725 )
5726 .unwrap(),
5727 cx
5728 )
5729 .await
5730 .unwrap()
5731 .is_empty(),
5732 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5733 );
5734
5735 assert!(
5736 search(
5737 &project,
5738 SearchQuery::text(
5739 search_query,
5740 false,
5741 true,
5742 false,
5743 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5744 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5745 false,
5746 None,
5747 )
5748 .unwrap(),
5749 cx
5750 )
5751 .await
5752 .unwrap()
5753 .is_empty(),
5754 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5755 );
5756
5757 assert!(
5758 search(
5759 &project,
5760 SearchQuery::text(
5761 search_query,
5762 false,
5763 true,
5764 false,
5765 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5766 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5767 false,
5768 None,
5769 )
5770 .unwrap(),
5771 cx
5772 )
5773 .await
5774 .unwrap()
5775 .is_empty(),
5776 "Non-matching inclusions and exclusions should not change that."
5777 );
5778
5779 assert_eq!(
5780 search(
5781 &project,
5782 SearchQuery::text(
5783 search_query,
5784 false,
5785 true,
5786 false,
5787 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5788 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5789 false,
5790 None,
5791 )
5792 .unwrap(),
5793 cx
5794 )
5795 .await
5796 .unwrap(),
5797 HashMap::from_iter([
5798 (path!("dir/one.ts").to_string(), vec![14..18]),
5799 (path!("dir/two.ts").to_string(), vec![14..18]),
5800 ]),
5801 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5802 );
5803}
5804
// Verifies that inclusion patterns can select files from one worktree out of
// several, and that un-prefixed patterns apply across every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified inclusion pattern restricts results to that worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(), // files_to_include
                Default::default(), // files_to_exclude
                true, // NOTE(review): appears to make patterns match worktree-qualified paths — confirm against SearchQuery::text
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unqualified pattern matches files in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5902
// Verifies the `include_ignored` search flag: by default gitignored paths
// (`target/`, `node_modules/`) are skipped; with the flag set they are
// searched, and inclusion/exclusion matchers still apply on top.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories are not visited.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false, // do not search gitignored files
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so the previous scan state cannot influence this search.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true, // search gitignored files too
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions are applied to ignored files as well.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6025
// Verifies text search over non-ASCII content. Expected ranges are byte
// offsets: "привет" is 6 Cyrillic chars at 2 UTF-8 bytes each, i.e. 12 bytes.
// The asserts also pin which `SearchQuery` variant each query lowers to.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true, // case-sensitive
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive query stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false, // case-insensitive
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Per this assertion, a case-insensitive non-ASCII query is lowered to a
    // regex query; it must still match both "ПРИВЕТ" and "привет".
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' must be treated literally (text query), not as a regex
    // wildcard: only "ПРИВЕТ." in two.rs matches (12 + 1 byte => 3..16).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6109
6110#[gpui::test]
6111async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6112 init_test(cx);
6113
6114 let fs = FakeFs::new(cx.executor());
6115 fs.insert_tree(
6116 "/one/two",
6117 json!({
6118 "three": {
6119 "a.txt": "",
6120 "four": {}
6121 },
6122 "c.rs": ""
6123 }),
6124 )
6125 .await;
6126
6127 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6128 project
6129 .update(cx, |project, cx| {
6130 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6131 project.create_entry((id, "b.."), true, cx)
6132 })
6133 .await
6134 .unwrap()
6135 .into_included()
6136 .unwrap();
6137
6138 // Can't create paths outside the project
6139 let result = project
6140 .update(cx, |project, cx| {
6141 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6142 project.create_entry((id, "../../boop"), true, cx)
6143 })
6144 .await;
6145 assert!(result.is_err());
6146
6147 // Can't create paths with '..'
6148 let result = project
6149 .update(cx, |project, cx| {
6150 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6151 project.create_entry((id, "four/../beep"), true, cx)
6152 })
6153 .await;
6154 assert!(result.is_err());
6155
6156 assert_eq!(
6157 fs.paths(true),
6158 vec![
6159 PathBuf::from(path!("/")),
6160 PathBuf::from(path!("/one")),
6161 PathBuf::from(path!("/one/two")),
6162 PathBuf::from(path!("/one/two/c.rs")),
6163 PathBuf::from(path!("/one/two/three")),
6164 PathBuf::from(path!("/one/two/three/a.txt")),
6165 PathBuf::from(path!("/one/two/three/b..")),
6166 PathBuf::from(path!("/one/two/three/four")),
6167 ]
6168 );
6169
6170 // And we cannot open buffers with '..'
6171 let result = project
6172 .update(cx, |project, cx| {
6173 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6174 project.open_buffer((id, "../c.rs"), cx)
6175 })
6176 .await;
6177 assert!(result.is_err())
6178}
6179
// Verifies that `Project::hover` fans out to every language server registered
// for the buffer's language that advertises hover capability, and merges the
// non-empty responses: two servers answer with content, one answers `None`,
// and one without the capability must never be queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx": the first three declare hover capability,
    // the last one explicitly does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name so we can later
    // await exactly the set of servers that should be queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with a hover naming themselves, so the final
                // assertion can tell their responses apart.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Has the capability but produces no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // No hover capability: receiving a request at all is a failure.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received a hover request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6334
// Verifies that hover content consisting only of empty or whitespace-only
// marked strings is filtered out entirely, yielding no hover blocks.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with content made only of blank strings.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6408
// Verifies that `Project::code_actions` filters the server's response by the
// requested action kinds: the server offers two actions, only the requested
// `SOURCE_ORGANIZE_IMPORTS` kind is returned.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers both an organize-imports and a fix-all action.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only the organize-imports kind over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Make sure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6487
6488#[gpui::test]
6489async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6490 init_test(cx);
6491
6492 let fs = FakeFs::new(cx.executor());
6493 fs.insert_tree(
6494 path!("/dir"),
6495 json!({
6496 "a.tsx": "a",
6497 }),
6498 )
6499 .await;
6500
6501 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6502
6503 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6504 language_registry.add(tsx_lang());
6505 let language_server_names = [
6506 "TypeScriptServer",
6507 "TailwindServer",
6508 "ESLintServer",
6509 "NoActionsCapabilitiesServer",
6510 ];
6511
6512 let mut language_server_rxs = [
6513 language_registry.register_fake_lsp(
6514 "tsx",
6515 FakeLspAdapter {
6516 name: language_server_names[0],
6517 capabilities: lsp::ServerCapabilities {
6518 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6519 ..lsp::ServerCapabilities::default()
6520 },
6521 ..FakeLspAdapter::default()
6522 },
6523 ),
6524 language_registry.register_fake_lsp(
6525 "tsx",
6526 FakeLspAdapter {
6527 name: language_server_names[1],
6528 capabilities: lsp::ServerCapabilities {
6529 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6530 ..lsp::ServerCapabilities::default()
6531 },
6532 ..FakeLspAdapter::default()
6533 },
6534 ),
6535 language_registry.register_fake_lsp(
6536 "tsx",
6537 FakeLspAdapter {
6538 name: language_server_names[2],
6539 capabilities: lsp::ServerCapabilities {
6540 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6541 ..lsp::ServerCapabilities::default()
6542 },
6543 ..FakeLspAdapter::default()
6544 },
6545 ),
6546 language_registry.register_fake_lsp(
6547 "tsx",
6548 FakeLspAdapter {
6549 name: language_server_names[3],
6550 capabilities: lsp::ServerCapabilities {
6551 code_action_provider: None,
6552 ..lsp::ServerCapabilities::default()
6553 },
6554 ..FakeLspAdapter::default()
6555 },
6556 ),
6557 ];
6558
6559 let (buffer, _handle) = project
6560 .update(cx, |p, cx| {
6561 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6562 })
6563 .await
6564 .unwrap();
6565 cx.executor().run_until_parked();
6566
6567 let mut servers_with_actions_requests = HashMap::default();
6568 for i in 0..language_server_names.len() {
6569 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6570 panic!(
6571 "Failed to get language server #{i} with name {}",
6572 &language_server_names[i]
6573 )
6574 });
6575 let new_server_name = new_server.server.name();
6576
6577 assert!(
6578 !servers_with_actions_requests.contains_key(&new_server_name),
6579 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6580 );
6581 match new_server_name.0.as_ref() {
6582 "TailwindServer" | "TypeScriptServer" => {
6583 servers_with_actions_requests.insert(
6584 new_server_name.clone(),
6585 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6586 move |_, _| {
6587 let name = new_server_name.clone();
6588 async move {
6589 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6590 lsp::CodeAction {
6591 title: format!("{name} code action"),
6592 ..lsp::CodeAction::default()
6593 },
6594 )]))
6595 }
6596 },
6597 ),
6598 );
6599 }
6600 "ESLintServer" => {
6601 servers_with_actions_requests.insert(
6602 new_server_name,
6603 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6604 |_, _| async move { Ok(None) },
6605 ),
6606 );
6607 }
6608 "NoActionsCapabilitiesServer" => {
6609 let _never_handled = new_server
6610 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6611 panic!(
6612 "Should not call for code actions server with no corresponding capabilities"
6613 )
6614 });
6615 }
6616 unexpected => panic!("Unexpected server name: {unexpected}"),
6617 }
6618 }
6619
6620 let code_actions_task = project.update(cx, |project, cx| {
6621 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6622 });
6623
6624 // cx.run_until_parked();
6625 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6626 |mut code_actions_request| async move {
6627 code_actions_request
6628 .next()
6629 .await
6630 .expect("All code actions requests should have been triggered")
6631 },
6632 ))
6633 .await;
6634 assert_eq!(
6635 vec!["TailwindServer code action", "TypeScriptServer code action"],
6636 code_actions_task
6637 .await
6638 .unwrap()
6639 .unwrap()
6640 .into_iter()
6641 .map(|code_action| code_action.lsp_action.title().to_owned())
6642 .sorted()
6643 .collect::<Vec<_>>(),
6644 "Should receive code actions responses from all related servers with hover capabilities"
6645 );
6646}
6647
6648#[gpui::test]
6649async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6650 init_test(cx);
6651
6652 let fs = FakeFs::new(cx.executor());
6653 fs.insert_tree(
6654 "/dir",
6655 json!({
6656 "a.rs": "let a = 1;",
6657 "b.rs": "let b = 2;",
6658 "c.rs": "let c = 2;",
6659 }),
6660 )
6661 .await;
6662
6663 let project = Project::test(
6664 fs,
6665 [
6666 "/dir/a.rs".as_ref(),
6667 "/dir/b.rs".as_ref(),
6668 "/dir/c.rs".as_ref(),
6669 ],
6670 cx,
6671 )
6672 .await;
6673
6674 // check the initial state and get the worktrees
6675 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6676 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6677 assert_eq!(worktrees.len(), 3);
6678
6679 let worktree_a = worktrees[0].read(cx);
6680 let worktree_b = worktrees[1].read(cx);
6681 let worktree_c = worktrees[2].read(cx);
6682
6683 // check they start in the right order
6684 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6685 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6686 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6687
6688 (
6689 worktrees[0].clone(),
6690 worktrees[1].clone(),
6691 worktrees[2].clone(),
6692 )
6693 });
6694
6695 // move first worktree to after the second
6696 // [a, b, c] -> [b, a, c]
6697 project
6698 .update(cx, |project, cx| {
6699 let first = worktree_a.read(cx);
6700 let second = worktree_b.read(cx);
6701 project.move_worktree(first.id(), second.id(), cx)
6702 })
6703 .expect("moving first after second");
6704
6705 // check the state after moving
6706 project.update(cx, |project, cx| {
6707 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6708 assert_eq!(worktrees.len(), 3);
6709
6710 let first = worktrees[0].read(cx);
6711 let second = worktrees[1].read(cx);
6712 let third = worktrees[2].read(cx);
6713
6714 // check they are now in the right order
6715 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6716 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6717 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6718 });
6719
6720 // move the second worktree to before the first
6721 // [b, a, c] -> [a, b, c]
6722 project
6723 .update(cx, |project, cx| {
6724 let second = worktree_a.read(cx);
6725 let first = worktree_b.read(cx);
6726 project.move_worktree(first.id(), second.id(), cx)
6727 })
6728 .expect("moving second before first");
6729
6730 // check the state after moving
6731 project.update(cx, |project, cx| {
6732 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6733 assert_eq!(worktrees.len(), 3);
6734
6735 let first = worktrees[0].read(cx);
6736 let second = worktrees[1].read(cx);
6737 let third = worktrees[2].read(cx);
6738
6739 // check they are now in the right order
6740 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6741 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6742 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6743 });
6744
6745 // move the second worktree to after the third
6746 // [a, b, c] -> [a, c, b]
6747 project
6748 .update(cx, |project, cx| {
6749 let second = worktree_b.read(cx);
6750 let third = worktree_c.read(cx);
6751 project.move_worktree(second.id(), third.id(), cx)
6752 })
6753 .expect("moving second after third");
6754
6755 // check the state after moving
6756 project.update(cx, |project, cx| {
6757 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6758 assert_eq!(worktrees.len(), 3);
6759
6760 let first = worktrees[0].read(cx);
6761 let second = worktrees[1].read(cx);
6762 let third = worktrees[2].read(cx);
6763
6764 // check they are now in the right order
6765 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6766 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6767 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6768 });
6769
6770 // move the third worktree to before the second
6771 // [a, c, b] -> [a, b, c]
6772 project
6773 .update(cx, |project, cx| {
6774 let third = worktree_c.read(cx);
6775 let second = worktree_b.read(cx);
6776 project.move_worktree(third.id(), second.id(), cx)
6777 })
6778 .expect("moving third before second");
6779
6780 // check the state after moving
6781 project.update(cx, |project, cx| {
6782 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6783 assert_eq!(worktrees.len(), 3);
6784
6785 let first = worktrees[0].read(cx);
6786 let second = worktrees[1].read(cx);
6787 let third = worktrees[2].read(cx);
6788
6789 // check they are now in the right order
6790 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6791 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6792 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6793 });
6794
6795 // move the first worktree to after the third
6796 // [a, b, c] -> [b, c, a]
6797 project
6798 .update(cx, |project, cx| {
6799 let first = worktree_a.read(cx);
6800 let third = worktree_c.read(cx);
6801 project.move_worktree(first.id(), third.id(), cx)
6802 })
6803 .expect("moving first after third");
6804
6805 // check the state after moving
6806 project.update(cx, |project, cx| {
6807 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6808 assert_eq!(worktrees.len(), 3);
6809
6810 let first = worktrees[0].read(cx);
6811 let second = worktrees[1].read(cx);
6812 let third = worktrees[2].read(cx);
6813
6814 // check they are now in the right order
6815 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6816 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6817 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6818 });
6819
6820 // move the third worktree to before the first
6821 // [b, c, a] -> [a, b, c]
6822 project
6823 .update(cx, |project, cx| {
6824 let third = worktree_a.read(cx);
6825 let first = worktree_b.read(cx);
6826 project.move_worktree(third.id(), first.id(), cx)
6827 })
6828 .expect("moving third before first");
6829
6830 // check the state after moving
6831 project.update(cx, |project, cx| {
6832 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6833 assert_eq!(worktrees.len(), 3);
6834
6835 let first = worktrees[0].read(cx);
6836 let second = worktrees[1].read(cx);
6837 let third = worktrees[2].read(cx);
6838
6839 // check they are now in the right order
6840 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6841 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6842 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6843 });
6844}
6845
/// Verifies that an unstaged diff (working copy vs. the git index) reports
/// the expected hunks, and that it is recomputed when the index is rewritten.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of the file as staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the file on disk (the working copy).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // After the initial diff calculation, the comment line is an addition and
    // the println line is a modification relative to the index.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so that more of the working copy is staged.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff is recomputed against the new index text: only the println
    // line remains unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6943
/// Verifies that an uncommitted diff (working copy vs. HEAD) tracks both
/// HEAD changes and index changes, including a file that has been deleted
/// from the working copy and then had its deletion staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD contents.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index contents.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy contents.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment line is added but unstaged (it has a secondary hunk);
        // the println change is already present in the index.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (by omitting it from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7123
/// Tests staging and unstaging of individual hunks: the optimistic "pending"
/// states, the events the diff emits, and rollback of the optimistic state
/// when a write to the git index fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start identical, so every difference from the
    // working copy begins fully unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // A DiffChanged event accompanies the rollback.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7463
/// Tests staging hunks while filesystem events are paused, so index writes
/// complete while the change notifications for earlier writes are still in
/// flight. All hunks must still end up staged once events are flushed.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It appears as optimistically pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7657
/// Randomized test: repeatedly stage or unstage random hunks with random
/// delays (sometimes deprioritizing diff recalculation to provoke races with
/// index writes), then verify the settled hunk statuses match a model kept
/// alongside the real diff.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the environment.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected secondary statuses.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let other tasks interleave between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending transition should have settled.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7776
7777#[gpui::test]
7778async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7779 init_test(cx);
7780
7781 let committed_contents = r#"
7782 fn main() {
7783 println!("hello from HEAD");
7784 }
7785 "#
7786 .unindent();
7787 let file_contents = r#"
7788 fn main() {
7789 println!("hello from the working copy");
7790 }
7791 "#
7792 .unindent();
7793
7794 let fs = FakeFs::new(cx.background_executor.clone());
7795 fs.insert_tree(
7796 "/dir",
7797 json!({
7798 ".git": {},
7799 "src": {
7800 "main.rs": file_contents,
7801 }
7802 }),
7803 )
7804 .await;
7805
7806 fs.set_head_for_repo(
7807 Path::new("/dir/.git"),
7808 &[("src/main.rs".into(), committed_contents.clone())],
7809 "deadbeef",
7810 );
7811 fs.set_index_for_repo(
7812 Path::new("/dir/.git"),
7813 &[("src/main.rs".into(), committed_contents.clone())],
7814 );
7815
7816 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7817
7818 let buffer = project
7819 .update(cx, |project, cx| {
7820 project.open_local_buffer("/dir/src/main.rs", cx)
7821 })
7822 .await
7823 .unwrap();
7824 let uncommitted_diff = project
7825 .update(cx, |project, cx| {
7826 project.open_uncommitted_diff(buffer.clone(), cx)
7827 })
7828 .await
7829 .unwrap();
7830
7831 cx.run_until_parked();
7832 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7833 let snapshot = buffer.read(cx).snapshot();
7834 assert_hunks(
7835 uncommitted_diff.hunks(&snapshot, cx),
7836 &snapshot,
7837 &uncommitted_diff.base_text_string().unwrap(),
7838 &[(
7839 1..2,
7840 " println!(\"hello from HEAD\");\n",
7841 " println!(\"hello from the working copy\");\n",
7842 DiffHunkStatus {
7843 kind: DiffHunkStatusKind::Modified,
7844 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7845 },
7846 )],
7847 );
7848 });
7849}
7850
/// Tests resolving project paths to their containing git repository and
/// repo-relative path, including nested repositories, and that resolution is
/// invalidated when a `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Outer repo at dir1, with a nested repo at dir1/deps/dep1. c.txt lies
    // outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)) pairs.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Remove the outer repository; paths under it should no longer resolve.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7940
7941#[gpui::test]
7942async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7943 init_test(cx);
7944 let fs = FakeFs::new(cx.background_executor.clone());
7945 fs.insert_tree(
7946 path!("/root"),
7947 json!({
7948 "home": {
7949 ".git": {},
7950 "project": {
7951 "a.txt": "A"
7952 },
7953 },
7954 }),
7955 )
7956 .await;
7957 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7958
7959 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7960 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7961 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7962
7963 project
7964 .update(cx, |project, cx| project.git_scans_complete(cx))
7965 .await;
7966 tree.flush_fs_events(cx).await;
7967
7968 project.read_with(cx, |project, cx| {
7969 let containing = project
7970 .git_store()
7971 .read(cx)
7972 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7973 assert!(containing.is_none());
7974 });
7975
7976 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7977 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7978 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7979 project
7980 .update(cx, |project, cx| project.git_scans_complete(cx))
7981 .await;
7982 tree.flush_fs_events(cx).await;
7983
7984 project.read_with(cx, |project, cx| {
7985 let containing = project
7986 .git_store()
7987 .read(cx)
7988 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7989 assert_eq!(
7990 containing
7991 .unwrap()
7992 .0
7993 .read(cx)
7994 .work_directory_abs_path
7995 .as_ref(),
7996 Path::new(path!("/root/home"))
7997 );
7998 });
7999}
8000
/// End-to-end status test against a real on-disk git repository (RealFs and
/// the git2 helpers): verifies cached status entries across modifications,
/// commits, and deletions of tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // RealFs performs blocking IO, so parking must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now show up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the current state (staging a/c, removing d from the index).
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8130
// Verifies two pieces of status postprocessing:
// - a file deleted in the index but still present in the working copy shows a
//   combined "DA" status (index: Deleted, worktree: Added);
// - a nested git repository inside the work directory is excluded from the
//   outer repository's computed statuses.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real file system is used below, so blocking calls must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    // Wait for FS events and the initial git scan to settle before asserting.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (work directory ending in "project"), not the
    // nested one in `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8193
// Opens a worktree rooted at a subfolder deep inside a repository and checks
// that the repository is still discovered at its true root (an ancestor of the
// worktree) and that statuses for paths inside the worktree are reported.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt is given a status by the fake repo; c.txt has none.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // The project's worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the ancestor containing `.git`,
        // not the worktree root that was opened.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should clear the cached status too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8270
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` with no predicates is always false, so this test is
// currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a merge conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is in a conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8353
// Verifies that rewriting .gitignore flips which files are considered ignored,
// and that a newly un-ignored file picks up its staged (Added) status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed; b.txt is
    // ignored by the initial "*.txt" rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored/status flags should have swapped: a.xml is now ignored, and
    // b.txt is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8421
// Verifies that renaming a repository's work directory on disk updates the
// repository's recorded work directory path while preserving file statuses.
//
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is never added, so it is untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, a is modified, b is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the entire work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename, with statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8502
// End-to-end exercise of git status tracking against a real repository:
// startup state, working-copy edits, commits, index manipulation, stashes,
// .gitignore changes, and renames of files and directories.
//
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // a.txt, c/d/e.txt and .gitignore are committed; b.txt and f.txt stay
    // untracked; target/ is ignored.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt were just committed, so they carry no status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and expand the ignore rules to cover f.txt as well.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // A new file created inside a newly created nested directory should be
    // reported as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should move the untracked status to the
    // file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8704
// Verifies that adding an invisible (non-visible) worktree for a single file
// does not cause its containing repository to appear in `repositories`.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repos: dir1 (outer) and dir1/dep1 (inner). The project only
    // opens dep1 as its visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the inner repo, which backs the visible worktree, is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible single-file worktree for b.txt, which lives in the
    // outer repo (dir1).
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: the outer repo stays invisible.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8766
// Verifies ignore handling across rescans: files ignored by an ancestor
// .gitignore (outside the worktree), files ignored by the repo's own
// .gitignore, and newly created tracked/ignored files.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer .gitignore lives above the repo; the inner one ignores
    // "ignored-dir" within the repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: only the file under ignored-dir is flagged as ignored.
    // The ancestor .gitignore sits outside the repo, so it doesn't mark
    // tracked-dir/ancestor-ignored-file1 as ignored here.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new staged file, one new ancestor-ignored file, and one new
    // file inside the ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged new file shows up as Added; the others mirror the
        // pre-existing ignore behavior.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8902
// Verifies discovery of linked git worktrees (".git" file pointing at
// .git/worktrees/<name>") and submodules (".git" file pointing at
// .git/modules/<path>"), and that changes in those repos are picked up.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repos should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    // Setting HEAD/index contents that differ from the on-disk file makes
    // src/b.txt appear modified in the linked worktree's repo.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository, not the
    // outer repo. The barrier waits for pending repository work to finish.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9052
// Verifies that two project worktrees living inside the same git repository
// produce a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository (the shared parent) should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9099
9100async fn search(
9101 project: &Entity<Project>,
9102 query: SearchQuery,
9103 cx: &mut gpui::TestAppContext,
9104) -> Result<HashMap<String, Vec<Range<usize>>>> {
9105 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9106 let mut results = HashMap::default();
9107 while let Ok(search_result) = search_rx.recv().await {
9108 match search_result {
9109 SearchResult::Buffer { buffer, ranges } => {
9110 results.entry(buffer).or_insert(ranges);
9111 }
9112 SearchResult::LimitReached => {}
9113 }
9114 }
9115 Ok(results
9116 .into_iter()
9117 .map(|(buffer, ranges)| {
9118 buffer.update(cx, |buffer, cx| {
9119 let path = buffer
9120 .file()
9121 .unwrap()
9122 .full_path(cx)
9123 .to_string_lossy()
9124 .to_string();
9125 let ranges = ranges
9126 .into_iter()
9127 .map(|range| range.to_offset(buffer))
9128 .collect::<Vec<_>>();
9129 (path, ranges)
9130 })
9131 })
9132 .collect())
9133}
9134
// Shared setup for every test in this file: initializes logging plus the
// global state (settings store, release channel, language registry, project
// settings) that the project machinery expects.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store is installed as a global first; the `init` calls
        // below presumably register their settings against it — keep this
        // ordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9146
9147fn json_lang() -> Arc<Language> {
9148 Arc::new(Language::new(
9149 LanguageConfig {
9150 name: "JSON".into(),
9151 matcher: LanguageMatcher {
9152 path_suffixes: vec!["json".to_string()],
9153 ..Default::default()
9154 },
9155 ..Default::default()
9156 },
9157 None,
9158 ))
9159}
9160
9161fn js_lang() -> Arc<Language> {
9162 Arc::new(Language::new(
9163 LanguageConfig {
9164 name: "JavaScript".into(),
9165 matcher: LanguageMatcher {
9166 path_suffixes: vec!["js".to_string()],
9167 ..Default::default()
9168 },
9169 ..Default::default()
9170 },
9171 None,
9172 ))
9173}
9174
9175fn rust_lang() -> Arc<Language> {
9176 Arc::new(Language::new(
9177 LanguageConfig {
9178 name: "Rust".into(),
9179 matcher: LanguageMatcher {
9180 path_suffixes: vec!["rs".to_string()],
9181 ..Default::default()
9182 },
9183 ..Default::default()
9184 },
9185 Some(tree_sitter_rust::LANGUAGE.into()),
9186 ))
9187}
9188
// A "Python" test language (no real parsing) whose toolchain lister reports a
// "Python Venv" toolchain for every `.venv` directory found in the ancestors
// of the queried subroot, as seen through the provided fake file system.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<Path>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path
                .ancestors()
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Probe `<worktree_root>/<ancestor>/.venv` on the fake fs.
                let venv_path = worktree_root.join(ancestor).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is deliberately unimplemented for these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9259
9260fn typescript_lang() -> Arc<Language> {
9261 Arc::new(Language::new(
9262 LanguageConfig {
9263 name: "TypeScript".into(),
9264 matcher: LanguageMatcher {
9265 path_suffixes: vec!["ts".to_string()],
9266 ..Default::default()
9267 },
9268 ..Default::default()
9269 },
9270 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9271 ))
9272}
9273
9274fn tsx_lang() -> Arc<Language> {
9275 Arc::new(Language::new(
9276 LanguageConfig {
9277 name: "tsx".into(),
9278 matcher: LanguageMatcher {
9279 path_suffixes: vec!["tsx".to_string()],
9280 ..Default::default()
9281 },
9282 ..Default::default()
9283 },
9284 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9285 ))
9286}
9287
9288fn get_all_tasks(
9289 project: &Entity<Project>,
9290 task_contexts: Arc<TaskContexts>,
9291 cx: &mut App,
9292) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9293 let new_tasks = project.update(cx, |project, cx| {
9294 project.task_store.update(cx, |task_store, cx| {
9295 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9296 this.used_and_current_resolved_tasks(task_contexts, cx)
9297 })
9298 })
9299 });
9300
9301 cx.background_spawn(async move {
9302 let (mut old, new) = new_tasks.await;
9303 old.extend(new);
9304 old
9305 })
9306}
9307
9308#[track_caller]
9309fn assert_entry_git_state(
9310 tree: &Worktree,
9311 repository: &Repository,
9312 path: &str,
9313 index_status: Option<StatusCode>,
9314 is_ignored: bool,
9315) {
9316 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9317 let entry = tree
9318 .entry_for_path(path)
9319 .unwrap_or_else(|| panic!("entry {path} not found"));
9320 let status = repository
9321 .status_for_path(&path.into())
9322 .map(|entry| entry.status);
9323 let expected = index_status.map(|index_status| {
9324 TrackedStatus {
9325 index_status,
9326 worktree_status: StatusCode::Unmodified,
9327 }
9328 .into()
9329 });
9330 assert_eq!(
9331 status, expected,
9332 "expected {path} to have git status: {expected:?}"
9333 );
9334 assert_eq!(
9335 entry.is_ignored, is_ignored,
9336 "expected {path} to have is_ignored: {is_ignored}"
9337 );
9338}
9339
9340#[track_caller]
9341fn git_init(path: &Path) -> git2::Repository {
9342 let mut init_opts = RepositoryInitOptions::new();
9343 init_opts.initial_head("main");
9344 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9345}
9346
9347#[track_caller]
9348fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9349 let path = path.as_ref();
9350 let mut index = repo.index().expect("Failed to get index");
9351 index.add_path(path).expect("Failed to add file");
9352 index.write().expect("Failed to write index");
9353}
9354
9355#[track_caller]
9356fn git_remove_index(path: &Path, repo: &git2::Repository) {
9357 let mut index = repo.index().expect("Failed to get index");
9358 index.remove_path(path).expect("Failed to add file");
9359 index.write().expect("Failed to write index");
9360}
9361
9362#[track_caller]
9363fn git_commit(msg: &'static str, repo: &git2::Repository) {
9364 use git2::Signature;
9365
9366 let signature = Signature::now("test", "test@zed.dev").unwrap();
9367 let oid = repo.index().unwrap().write_tree().unwrap();
9368 let tree = repo.find_tree(oid).unwrap();
9369 if let Ok(head) = repo.head() {
9370 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9371
9372 let parent_commit = parent_obj.as_commit().unwrap();
9373
9374 repo.commit(
9375 Some("HEAD"),
9376 &signature,
9377 &signature,
9378 msg,
9379 &tree,
9380 &[parent_commit],
9381 )
9382 .expect("Failed to commit with parent");
9383 } else {
9384 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9385 .expect("Failed to commit");
9386 }
9387}
9388
// Cherry-picks `commit` onto the current HEAD.
// Compiled out via `#[cfg(any())]` (an always-false predicate); kept for
// tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9394
9395#[track_caller]
9396fn git_stash(repo: &mut git2::Repository) {
9397 use git2::Signature;
9398
9399 let signature = Signature::now("test", "test@zed.dev").unwrap();
9400 repo.stash_save(&signature, "N/A", None)
9401 .expect("Failed to stash");
9402}
9403
9404#[track_caller]
9405fn git_reset(offset: usize, repo: &git2::Repository) {
9406 let head = repo.head().expect("Couldn't get repo head");
9407 let object = head.peel(git2::ObjectType::Commit).unwrap();
9408 let commit = object.as_commit().unwrap();
9409 let new_head = commit
9410 .parents()
9411 .inspect(|parnet| {
9412 parnet.message();
9413 })
9414 .nth(offset)
9415 .expect("Not enough history");
9416 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9417 .expect("Could not reset");
9418}
9419
/// Creates branch `name` at the current HEAD commit without checking it out.
/// Compiled out via `#[cfg(any())]` (an always-false predicate); kept for
/// tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The failure message previously said "Failed to commit" — a copy-paste
    // error; this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9430
// Points HEAD at `name` (a refname) and checks out the resulting tree.
// Compiled out via `#[cfg(any())]` (an always-false predicate); kept for
// tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9437
// Snapshots all git statuses in the repository, keyed by path.
// Compiled out via `#[cfg(any())]` (an always-false predicate); kept for
// tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
9447
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // `find_project_path` should resolve absolute paths into
    // (worktree_id, worktree-relative path) pairs across multiple worktrees.
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open a project with two sibling worktrees: project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // A nested file resolves to a worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // A file in the second worktree resolves to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Paths inside a worktree resolve even when no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree.
        // NOTE(review): unlike the other literals this one is not wrapped in
        // `path!()`; on Windows "/some/other/path" is not absolute — confirm intent.
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}