1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Worktree scanning must follow directory symlinks: opening a project
    // through a symlinked root sees the real files, and a symlinked
    // subdirectory ("finnochio" -> "fennel") resolves to the same inodes.
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so parking is allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink to the project root, plus a symlinked subdirectory inside it.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape, and grape again via the "finnochio" symlink.
        assert_eq!(tree.file_count(), 5);
        // Both paths must resolve to the same underlying file on disk.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling: it overrides .zed/settings.json,
    // nested .editorconfig files override ancestors, `tab_width` is used when
    // `indent_size` is unset, `max_line_length = off` falls back to the Zed
    // setting, and globs only affect matching file types.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            // NOTE(review): `off,` has a trailing comma, unlike the root
            // .editorconfig's `off` — presumably exercising lenient parsing;
            // confirm this is intentional fixture data.
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project watches fake paths.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // A custom git hosting provider declared in the project's
    // .zed/settings.json must be registered in the global registry, and
    // removing the setting must unregister it again.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from settings is visible in the global registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Blank out the project settings on disk; the watcher should pick it up.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The custom provider must be gone after the settings change.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Covers worktree-local configuration at two depths plus global tasks:
    // 1) .zed/settings.json in a subdirectory overrides the worktree root's;
    // 2) tasks from both .zed/tasks.json files are resolved, deeper first;
    // 3) after scheduling a task and adding a global tasks.json, ordering
    //    becomes: most-recently-used, then worktree, then global.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution context pinned to the (only) worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of tasks defined in the worktree root's .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings checks: root .zed/settings.json applies to a/a.rs,
            // b/.zed/settings.json overrides it for b/b.rs.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the deeper directory's task sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root .zed task as recently scheduled, and install a global
    // tasks.json with one extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task now sorts first, then the other worktree task,
    // then the global task (with its env carried through resolution).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A task whose command references $ZED_WORKTREE_ROOT only resolves when
    // the task context actually carries a worktree root variable: with no
    // active worktree context it must resolve to nothing, and with one it
    // must expand the variable into the command.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Context with an active item but no worktree context at all.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but with a worktree context that defines WorktreeRoot.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    // $ZED_WORKTREE_ROOT expands to "/dir" in the resolved command.
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects (rooted by their pyproject.toml manifests) share
    // one "ty" language server instance until a distinct toolchain is
    // activated for one of them, at which point that subproject gets its own
    // server instance.

    // Manifest provider that roots a subproject at the nearest ancestor
    // directory containing a pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been explicitly activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree(
1217 path!("/the-root"),
1218 json!({
1219 ".gitignore": "target\n",
1220 "Cargo.lock": "",
1221 "src": {
1222 "a.rs": "",
1223 "b.rs": "",
1224 },
1225 "target": {
1226 "x": {
1227 "out": {
1228 "x.rs": ""
1229 }
1230 },
1231 "y": {
1232 "out": {
1233 "y.rs": "",
1234 }
1235 },
1236 "z": {
1237 "out": {
1238 "z.rs": ""
1239 }
1240 }
1241 }
1242 }),
1243 )
1244 .await;
1245 fs.insert_tree(
1246 path!("/the-registry"),
1247 json!({
1248 "dep1": {
1249 "src": {
1250 "dep1.rs": "",
1251 }
1252 },
1253 "dep2": {
1254 "src": {
1255 "dep2.rs": "",
1256 }
1257 },
1258 }),
1259 )
1260 .await;
1261 fs.insert_tree(
1262 path!("/the/stdlib"),
1263 json!({
1264 "LICENSE": "",
1265 "src": {
1266 "string.rs": "",
1267 }
1268 }),
1269 )
1270 .await;
1271
1272 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1273 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1274 (project.languages().clone(), project.lsp_store())
1275 });
1276 language_registry.add(rust_lang());
1277 let mut fake_servers = language_registry.register_fake_lsp(
1278 "Rust",
1279 FakeLspAdapter {
1280 name: "the-language-server",
1281 ..Default::default()
1282 },
1283 );
1284
1285 cx.executor().run_until_parked();
1286
1287 // Start the language server by opening a buffer with a compatible file extension.
1288 project
1289 .update(cx, |project, cx| {
1290 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1291 })
1292 .await
1293 .unwrap();
1294
1295 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1296 project.update(cx, |project, cx| {
1297 let worktree = project.worktrees(cx).next().unwrap();
1298 assert_eq!(
1299 worktree
1300 .read(cx)
1301 .snapshot()
1302 .entries(true, 0)
1303 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("", false),
1307 (".gitignore", false),
1308 ("Cargo.lock", false),
1309 ("src", false),
1310 ("src/a.rs", false),
1311 ("src/b.rs", false),
1312 ("target", true),
1313 ]
1314 );
1315 });
1316
1317 let prev_read_dir_count = fs.read_dir_call_count();
1318
1319 let fake_server = fake_servers.next().await.unwrap();
1320 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1321 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1322 id
1323 });
1324
1325 // Simulate jumping to a definition in a dependency outside of the worktree.
1326 let _out_of_worktree_buffer = project
1327 .update(cx, |project, cx| {
1328 project.open_local_buffer_via_lsp(
1329 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1330 server_id,
1331 cx,
1332 )
1333 })
1334 .await
1335 .unwrap();
1336
1337 // Keep track of the FS events reported to the language server.
1338 let file_changes = Arc::new(Mutex::new(Vec::new()));
1339 fake_server
1340 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1341 registrations: vec![lsp::Registration {
1342 id: Default::default(),
1343 method: "workspace/didChangeWatchedFiles".to_string(),
1344 register_options: serde_json::to_value(
1345 lsp::DidChangeWatchedFilesRegistrationOptions {
1346 watchers: vec![
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/Cargo.toml").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/src/*.{rs,c}").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the-root/target/y/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("/the/stdlib/src/**/*.rs").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 lsp::FileSystemWatcher {
1372 glob_pattern: lsp::GlobPattern::String(
1373 path!("**/Cargo.lock").to_string(),
1374 ),
1375 kind: None,
1376 },
1377 ],
1378 },
1379 )
1380 .ok(),
1381 }],
1382 })
1383 .await
1384 .into_response()
1385 .unwrap();
1386 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1387 let file_changes = file_changes.clone();
1388 move |params, _| {
1389 let mut file_changes = file_changes.lock();
1390 file_changes.extend(params.changes);
1391 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1392 }
1393 });
1394
1395 cx.executor().run_until_parked();
1396 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1397 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1398
1399 let mut new_watched_paths = fs.watched_paths();
1400 new_watched_paths.retain(|path| {
1401 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1402 });
1403 assert_eq!(
1404 &new_watched_paths,
1405 &[
1406 Path::new(path!("/the-root")),
1407 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1408 Path::new(path!("/the/stdlib/src"))
1409 ]
1410 );
1411
1412 // Now the language server has asked us to watch an ignored directory path,
1413 // so we recursively load it.
1414 project.update(cx, |project, cx| {
1415 let worktree = project.visible_worktrees(cx).next().unwrap();
1416 assert_eq!(
1417 worktree
1418 .read(cx)
1419 .snapshot()
1420 .entries(true, 0)
1421 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1422 .collect::<Vec<_>>(),
1423 &[
1424 ("", false),
1425 (".gitignore", false),
1426 ("Cargo.lock", false),
1427 ("src", false),
1428 ("src/a.rs", false),
1429 ("src/b.rs", false),
1430 ("target", true),
1431 ("target/x", true),
1432 ("target/y", true),
1433 ("target/y/out", true),
1434 ("target/y/out/y.rs", true),
1435 ("target/z", true),
1436 ]
1437 );
1438 });
1439
1440 // Perform some file system mutations, two of which match the watched patterns,
1441 // and one of which does not.
1442 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1443 .await
1444 .unwrap();
1445 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1446 .await
1447 .unwrap();
1448 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1449 .await
1450 .unwrap();
1451 fs.create_file(
1452 path!("/the-root/target/x/out/x2.rs").as_ref(),
1453 Default::default(),
1454 )
1455 .await
1456 .unwrap();
1457 fs.create_file(
1458 path!("/the-root/target/y/out/y2.rs").as_ref(),
1459 Default::default(),
1460 )
1461 .await
1462 .unwrap();
1463 fs.save(
1464 path!("/the-root/Cargo.lock").as_ref(),
1465 &"".into(),
1466 Default::default(),
1467 )
1468 .await
1469 .unwrap();
1470 fs.save(
1471 path!("/the-stdlib/LICENSE").as_ref(),
1472 &"".into(),
1473 Default::default(),
1474 )
1475 .await
1476 .unwrap();
1477 fs.save(
1478 path!("/the/stdlib/src/string.rs").as_ref(),
1479 &"".into(),
1480 Default::default(),
1481 )
1482 .await
1483 .unwrap();
1484
1485 // The language server receives events for the FS mutations that match its watch patterns.
1486 cx.executor().run_until_parked();
1487 assert_eq!(
1488 &*file_changes.lock(),
1489 &[
1490 lsp::FileEvent {
1491 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1492 typ: lsp::FileChangeType::CHANGED,
1493 },
1494 lsp::FileEvent {
1495 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1496 typ: lsp::FileChangeType::DELETED,
1497 },
1498 lsp::FileEvent {
1499 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1500 typ: lsp::FileChangeType::CREATED,
1501 },
1502 lsp::FileEvent {
1503 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1504 typ: lsp::FileChangeType::CREATED,
1505 },
1506 lsp::FileEvent {
1507 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1508 typ: lsp::FileChangeType::CHANGED,
1509 },
1510 ]
1511 );
1512}
1513
// Verifies that when a project is opened as two single-file worktrees, push
// diagnostics published for each file land on the corresponding buffer only,
// and are surfaced as severity-annotated text chunks.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own (single-file) worktree root.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file, both attributed to the same server id:
    // an ERROR on `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should report exactly its own diagnostic, covering just the
    // variable name (columns 4..5).
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1619
1620#[gpui::test]
1621async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1622 init_test(cx);
1623
1624 let fs = FakeFs::new(cx.executor());
1625 fs.insert_tree(
1626 path!("/root"),
1627 json!({
1628 "dir": {
1629 ".git": {
1630 "HEAD": "ref: refs/heads/main",
1631 },
1632 ".gitignore": "b.rs",
1633 "a.rs": "let a = 1;",
1634 "b.rs": "let b = 2;",
1635 },
1636 "other.rs": "let b = c;"
1637 }),
1638 )
1639 .await;
1640
1641 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1642 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1643 let (worktree, _) = project
1644 .update(cx, |project, cx| {
1645 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1646 })
1647 .await
1648 .unwrap();
1649 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1650
1651 let (worktree, _) = project
1652 .update(cx, |project, cx| {
1653 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1654 })
1655 .await
1656 .unwrap();
1657 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1658
1659 let server_id = LanguageServerId(0);
1660 lsp_store.update(cx, |lsp_store, cx| {
1661 lsp_store
1662 .update_diagnostics(
1663 server_id,
1664 lsp::PublishDiagnosticsParams {
1665 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1666 version: None,
1667 diagnostics: vec![lsp::Diagnostic {
1668 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1669 severity: Some(lsp::DiagnosticSeverity::ERROR),
1670 message: "unused variable 'b'".to_string(),
1671 ..Default::default()
1672 }],
1673 },
1674 None,
1675 DiagnosticSourceKind::Pushed,
1676 &[],
1677 cx,
1678 )
1679 .unwrap();
1680 lsp_store
1681 .update_diagnostics(
1682 server_id,
1683 lsp::PublishDiagnosticsParams {
1684 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1685 version: None,
1686 diagnostics: vec![lsp::Diagnostic {
1687 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1688 severity: Some(lsp::DiagnosticSeverity::ERROR),
1689 message: "unknown variable 'c'".to_string(),
1690 ..Default::default()
1691 }],
1692 },
1693 None,
1694 DiagnosticSourceKind::Pushed,
1695 &[],
1696 cx,
1697 )
1698 .unwrap();
1699 });
1700
1701 let main_ignored_buffer = project
1702 .update(cx, |project, cx| {
1703 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1704 })
1705 .await
1706 .unwrap();
1707 main_ignored_buffer.update(cx, |buffer, _| {
1708 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1709 assert_eq!(
1710 chunks
1711 .iter()
1712 .map(|(s, d)| (s.as_str(), *d))
1713 .collect::<Vec<_>>(),
1714 &[
1715 ("let ", None),
1716 ("b", Some(DiagnosticSeverity::ERROR)),
1717 (" = 2;", None),
1718 ],
1719 "Gigitnored buffers should still get in-buffer diagnostics",
1720 );
1721 });
1722 let other_buffer = project
1723 .update(cx, |project, cx| {
1724 project.open_buffer((other_worktree_id, rel_path("")), cx)
1725 })
1726 .await
1727 .unwrap();
1728 other_buffer.update(cx, |buffer, _| {
1729 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1730 assert_eq!(
1731 chunks
1732 .iter()
1733 .map(|(s, d)| (s.as_str(), *d))
1734 .collect::<Vec<_>>(),
1735 &[
1736 ("let b = ", None),
1737 ("c", Some(DiagnosticSeverity::ERROR)),
1738 (";", None),
1739 ],
1740 "Buffers from hidden projects should still get in-buffer diagnostics"
1741 );
1742 });
1743
1744 project.update(cx, |project, cx| {
1745 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1746 assert_eq!(
1747 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1748 vec![(
1749 ProjectPath {
1750 worktree_id: main_worktree_id,
1751 path: rel_path("b.rs").into(),
1752 },
1753 server_id,
1754 DiagnosticSummary {
1755 error_count: 1,
1756 warning_count: 0,
1757 }
1758 )]
1759 );
1760 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1761 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1762 });
1763}
1764
// Verifies the event sequence for disk-based diagnostics: the server's progress
// token start/end maps to DiskBasedDiagnosticsStarted/Finished project events,
// published diagnostics produce DiagnosticsUpdated, and re-publishing an
// identical empty diagnostic set does not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications under this token are treated as
            // disk-based-diagnostics activity by the project.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token emits the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not yet open as a buffer.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress under the token emits the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards should surface the previously published
    // diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No change in diagnostics, so no further event should be pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1900
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not wedge the project: the replacement server's
// progress lifecycle alone drives the Started/Finished events, even though the
// old server never ended its progress token.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // Expected event order: old server (id 0) removed, new server (id 1) added,
    // buffer registered with the new server, then diagnostics started/finished.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2000
// Verifies that diagnostics published by a language server are cleared — both
// from the buffer and from the project-level summary — when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic reached the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2081
2082#[gpui::test]
2083async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2084 init_test(cx);
2085
2086 let fs = FakeFs::new(cx.executor());
2087 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2088
2089 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2090 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2091
2092 language_registry.add(rust_lang());
2093 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2094
2095 let (buffer, _handle) = project
2096 .update(cx, |project, cx| {
2097 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2098 })
2099 .await
2100 .unwrap();
2101
2102 // Before restarting the server, report diagnostics with an unknown buffer version.
2103 let fake_server = fake_servers.next().await.unwrap();
2104 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2105 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2106 version: Some(10000),
2107 diagnostics: Vec::new(),
2108 });
2109 cx.executor().run_until_parked();
2110 project.update(cx, |project, cx| {
2111 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2112 });
2113
2114 let mut fake_server = fake_servers.next().await.unwrap();
2115 let notification = fake_server
2116 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2117 .await
2118 .text_document;
2119 assert_eq!(notification.version, 0);
2120}
2121
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for the progress token that was
// started as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable progress.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2186
// Verifies that flipping the per-language `enable_language_server` setting
// stops and starts only the matching server: disabling Rust exits the Rust
// server while the JavaScript server keeps running, and re-enabling Rust
// starts a fresh server that re-opens the Rust buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2304
// Verifies that push diagnostics published against an older document version
// are mapped through every buffer edit made since that version: positions are
// translated, overlapping diagnostics from a later publish are both surfaced,
// and diagnostics arriving for an out-of-date version after further edits
// still land on the correct (current) ranges.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is treated as a disk-based diagnostics source, so entries carry
    // `is_disk_based: true` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics reported at rows 1-2 now appear at rows 3-4, shifted by
        // the two newlines inserted at the top of the buffer.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range starting/ending inside a diagnostic yields only the
        // overlapping portion of the highlighted text.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error that starts at the
        // same position; within the overlap the error severity wins in chunks.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both diagnostics land on their current positions despite having been
        // published in reverse buffer order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2596
// Verifies rendering of zero-width diagnostic ranges: an empty range is
// expanded to include an adjacent character so the diagnostic is still
// visible when the buffer is chunked for display.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =  \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics with empty ranges directly into the LSP store,
    // bypassing a language server: one mid-line, one at end-of-line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                (" \nlet three = 3;\n", None)
            ]
        );
    });
}
2672
// Verifies that diagnostics reported by two different language servers for
// the same file are kept separate (per server id) and are both counted in
// the project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path, same range — but reported under two distinct server ids.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // One error per server: the second report must not replace the first.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2733
// Verifies that `edits_from_lsp` interprets server edits relative to the
// document version the server last saw (the didOpen version here), mapping
// them through buffer edits made after that version so they apply cleanly to
// the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version number the server received in didOpen; the edits
    // below are expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These edits use coordinates valid for `lsp_document_version`, before the
    // comments above were inserted.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved local edits
    // (the comments) while still performing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2888
// Verifies that `edits_from_lsp` minimizes a huge "replace most of the file"
// style diff (as rust-analyzer emits for a merge-imports code action) down to
// the small set of edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The delete-and-reinsert dance collapses into two minimal edits:
        // rewrite the import path and drop the now-redundant second use line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2999
// Verifies that a replacement followed by an insertion starting at the same
// position — an ordering the LSP spec forbids but some servers emit — is
// still applied sensibly, with the inserted text landing before the
// replacement.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3055
// Verifies that `edits_from_lsp` tolerates malformed server edits — unsorted
// edits, an inverted range (end before start), and an end position beyond the
// last line — normalizing and clamping them into a minimal, valid edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the end of the file; must be clamped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal edit set
        // a well-formed merge-imports diff would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3162
3163fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3164 buffer: &Buffer,
3165 range: Range<T>,
3166) -> Vec<(String, Option<DiagnosticSeverity>)> {
3167 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3168 for chunk in buffer.snapshot().chunks(range, true) {
3169 if chunks
3170 .last()
3171 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3172 {
3173 chunks.last_mut().unwrap().0.push_str(chunk.text);
3174 } else {
3175 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3176 }
3177 }
3178 chunks
3179}
3180
// Verifies go-to-definition across files: the request carries the correct
// position, the result buffer is opened via an invisible worktree for the
// target file, and dropping the definition releases that worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs will be pulled in by the
    // definition result.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the `A` inside a.rs, a file outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file is held by a new, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path and whether it is user-visible.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3279
// Verifies that when a completion item provides a `text_edit`, that edit's
// range and new text are used for the completion, taking precedence over
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item's text_edit replaces the trailing "fqn" (last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3362
/// Completion items may omit their own `text_edit` and instead rely on the
/// list-level `item_defaults.edit_range` (LSP `CompletionListItemDefaults`).
/// This test pins the fallback order for the inserted text in that case.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake language server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off a completion request at the end of the buffer...
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // ...and answer it with a list whose default edit range covers the
        // trailing three characters ("fqn").
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text, applied over the
        // list-level default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label becomes the new text;
        // `insert_text` is ignored here (the result is "labelText", not
        // "irrelevant").
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3499
/// Covers completion responses that carry no edit range at all — neither a
/// per-item `text_edit` nor list-level defaults. The new text then falls
/// back to `insert_text` (Test 1) or the label (Test 2), and the replace
/// range is inferred from the word surrounding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label ("fullyQualifiedName?"), and the
    // replace range covers the word before the cursor ("fqn").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Note: the cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is the only candidate for the new text, and the replace
    // range covers "cmp" — the word before the cursor inside the literal.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3605
/// Completion text received from a language server may contain `\r` or
/// `\r\n` line endings; the final assertion shows both forms are normalized
/// to `\n` before the completion is surfaced.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an insert_text containing both a bare `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both carriage-return forms were replaced with plain newlines.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3673
/// Exercises the command-based code-action path: the server returns an
/// action with no edits, the client resolves it to obtain a command,
/// executes the command, and applies the edits the server pushes back via
/// `workspace/applyEdit`. The resulting transaction must be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake server advertising resolvable code actions and one command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // Opaque `data` to be echoed back in the resolve request.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit must be grouped into an undoable transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3815
/// Renaming a file into a directory hierarchy that does not exist yet must
/// create all intermediate directories and preserve the file's contents;
/// a follow-up move into an already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Grab the worktree and the entry id of the file to be moved.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a three-level-deep directory that doesn't exist.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id at its new location for the second move.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second move: into `dir1/dir2`, which already exists this time.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3923
3924#[gpui::test(iterations = 10)]
3925async fn test_save_file(cx: &mut gpui::TestAppContext) {
3926 init_test(cx);
3927
3928 let fs = FakeFs::new(cx.executor());
3929 fs.insert_tree(
3930 path!("/dir"),
3931 json!({
3932 "file1": "the old contents",
3933 }),
3934 )
3935 .await;
3936
3937 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3938 let buffer = project
3939 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3940 .await
3941 .unwrap();
3942 buffer.update(cx, |buffer, cx| {
3943 assert_eq!(buffer.text(), "the old contents");
3944 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3945 });
3946
3947 project
3948 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3949 .await
3950 .unwrap();
3951
3952 let new_text = fs
3953 .load(Path::new(path!("/dir/file1")))
3954 .await
3955 .unwrap()
3956 .replace("\r\n", "\n");
3957 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3958}
3959
/// Regression test (issue #24349): saving an untitled buffer under a path
/// with a recognized extension must spawn the matching language server and
/// register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Start from an empty directory — no file exists yet.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path, so no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving the buffer as a .rs file gives it a path (and thus a language).
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the running Rust language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4039
4040#[gpui::test(iterations = 30)]
4041async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4042 init_test(cx);
4043
4044 let fs = FakeFs::new(cx.executor());
4045 fs.insert_tree(
4046 path!("/dir"),
4047 json!({
4048 "file1": "the original contents",
4049 }),
4050 )
4051 .await;
4052
4053 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4054 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4055 let buffer = project
4056 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4057 .await
4058 .unwrap();
4059
4060 // Simulate buffer diffs being slow, so that they don't complete before
4061 // the next file change occurs.
4062 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4063
4064 // Change the buffer's file on disk, and then wait for the file change
4065 // to be detected by the worktree, so that the buffer starts reloading.
4066 fs.save(
4067 path!("/dir/file1").as_ref(),
4068 &"the first contents".into(),
4069 Default::default(),
4070 )
4071 .await
4072 .unwrap();
4073 worktree.next_event(cx).await;
4074
4075 // Change the buffer's file again. Depending on the random seed, the
4076 // previous file change may still be in progress.
4077 fs.save(
4078 path!("/dir/file1").as_ref(),
4079 &"the second contents".into(),
4080 Default::default(),
4081 )
4082 .await
4083 .unwrap();
4084 worktree.next_event(cx).await;
4085
4086 cx.executor().run_until_parked();
4087 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4088 buffer.read_with(cx, |buffer, _| {
4089 assert_eq!(buffer.text(), on_disk_text);
4090 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4091 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4092 });
4093}
4094
4095#[gpui::test(iterations = 30)]
4096async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4097 init_test(cx);
4098
4099 let fs = FakeFs::new(cx.executor());
4100 fs.insert_tree(
4101 path!("/dir"),
4102 json!({
4103 "file1": "the original contents",
4104 }),
4105 )
4106 .await;
4107
4108 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4109 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4110 let buffer = project
4111 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4112 .await
4113 .unwrap();
4114
4115 // Simulate buffer diffs being slow, so that they don't complete before
4116 // the next file change occurs.
4117 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4118
4119 // Change the buffer's file on disk, and then wait for the file change
4120 // to be detected by the worktree, so that the buffer starts reloading.
4121 fs.save(
4122 path!("/dir/file1").as_ref(),
4123 &"the first contents".into(),
4124 Default::default(),
4125 )
4126 .await
4127 .unwrap();
4128 worktree.next_event(cx).await;
4129
4130 cx.executor()
4131 .spawn(cx.executor().simulate_random_delay())
4132 .await;
4133
4134 // Perform a noop edit, causing the buffer's version to increase.
4135 buffer.update(cx, |buffer, cx| {
4136 buffer.edit([(0..0, " ")], None, cx);
4137 buffer.undo(cx);
4138 });
4139
4140 cx.executor().run_until_parked();
4141 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4142 buffer.read_with(cx, |buffer, _| {
4143 let buffer_text = buffer.text();
4144 if buffer_text == on_disk_text {
4145 assert!(
4146 !buffer.is_dirty() && !buffer.has_conflict(),
4147 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4148 );
4149 }
4150 // If the file change occurred while the buffer was processing the first
4151 // change, the buffer will be in a conflicting state.
4152 else {
4153 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4154 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4155 }
4156 });
4157}
4158
4159#[gpui::test]
4160async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4161 init_test(cx);
4162
4163 let fs = FakeFs::new(cx.executor());
4164 fs.insert_tree(
4165 path!("/dir"),
4166 json!({
4167 "file1": "the old contents",
4168 }),
4169 )
4170 .await;
4171
4172 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4173 let buffer = project
4174 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4175 .await
4176 .unwrap();
4177 buffer.update(cx, |buffer, cx| {
4178 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4179 });
4180
4181 project
4182 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4183 .await
4184 .unwrap();
4185
4186 let new_text = fs
4187 .load(Path::new(path!("/dir/file1")))
4188 .await
4189 .unwrap()
4190 .replace("\r\n", "\n");
4191 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4192}
4193
4194#[gpui::test]
4195async fn test_save_as(cx: &mut gpui::TestAppContext) {
4196 init_test(cx);
4197
4198 let fs = FakeFs::new(cx.executor());
4199 fs.insert_tree("/dir", json!({})).await;
4200
4201 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4202
4203 let languages = project.update(cx, |project, _| project.languages().clone());
4204 languages.add(rust_lang());
4205
4206 let buffer = project.update(cx, |project, cx| {
4207 project.create_local_buffer("", None, false, cx)
4208 });
4209 buffer.update(cx, |buffer, cx| {
4210 buffer.edit([(0..0, "abc")], None, cx);
4211 assert!(buffer.is_dirty());
4212 assert!(!buffer.has_conflict());
4213 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4214 });
4215 project
4216 .update(cx, |project, cx| {
4217 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4218 let path = ProjectPath {
4219 worktree_id,
4220 path: rel_path("file1.rs").into(),
4221 };
4222 project.save_buffer_as(buffer.clone(), path, cx)
4223 })
4224 .await
4225 .unwrap();
4226 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4227
4228 cx.executor().run_until_parked();
4229 buffer.update(cx, |buffer, cx| {
4230 assert_eq!(
4231 buffer.file().unwrap().full_path(cx),
4232 Path::new("dir/file1.rs")
4233 );
4234 assert!(!buffer.is_dirty());
4235 assert!(!buffer.has_conflict());
4236 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4237 });
4238
4239 let opened_buffer = project
4240 .update(cx, |project, cx| {
4241 project.open_local_buffer("/dir/file1.rs", cx)
4242 })
4243 .await
4244 .unwrap();
4245 assert_eq!(opened_buffer, buffer);
4246}
4247
4248#[gpui::test]
4249async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4250 init_test(cx);
4251
4252 let fs = FakeFs::new(cx.executor());
4253 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4254
4255 fs.insert_tree(
4256 path!("/dir"),
4257 json!({
4258 "data_a.txt": "data about a"
4259 }),
4260 )
4261 .await;
4262
4263 let buffer = project
4264 .update(cx, |project, cx| {
4265 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4266 })
4267 .await
4268 .unwrap();
4269
4270 buffer.update(cx, |buffer, cx| {
4271 buffer.edit([(11..12, "b")], None, cx);
4272 });
4273
4274 // Save buffer's contents as a new file and confirm that the buffer's now
4275 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4276 // file associated with the buffer has now been updated to `data_b.txt`
4277 project
4278 .update(cx, |project, cx| {
4279 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4280 let new_path = ProjectPath {
4281 worktree_id,
4282 path: rel_path("data_b.txt").into(),
4283 };
4284
4285 project.save_buffer_as(buffer.clone(), new_path, cx)
4286 })
4287 .await
4288 .unwrap();
4289
4290 buffer.update(cx, |buffer, cx| {
4291 assert_eq!(
4292 buffer.file().unwrap().full_path(cx),
4293 Path::new("dir/data_b.txt")
4294 )
4295 });
4296
4297 // Open the original `data_a.txt` file, confirming that its contents are
4298 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4299 let original_buffer = project
4300 .update(cx, |project, cx| {
4301 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4302 })
4303 .await
4304 .unwrap();
4305
4306 original_buffer.update(cx, |buffer, cx| {
4307 assert_eq!(buffer.text(), "data about a");
4308 assert_eq!(
4309 buffer.file().unwrap().full_path(cx),
4310 Path::new("dir/data_a.txt")
4311 )
4312 });
4313}
4314
/// End-to-end check of worktree rescanning on a real filesystem: renames
/// and deletions on disk must keep entry ids stable, re-point open buffers
/// at their new paths, and replicate consistently into a remote worktree
/// through the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS watching blocks; parking must be allowed on the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp-tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote copy further down.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including the directory move b/c -> d.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        // Renamed/moved files are still present on disk; the removed one
        // reports DiskState::Deleted.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the recorded local updates into the remote replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4482
4483#[gpui::test(iterations = 10)]
4484async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4485 init_test(cx);
4486
4487 let fs = FakeFs::new(cx.executor());
4488 fs.insert_tree(
4489 path!("/dir"),
4490 json!({
4491 "a": {
4492 "file1": "",
4493 }
4494 }),
4495 )
4496 .await;
4497
4498 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4499 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4500 let tree_id = tree.update(cx, |tree, _| tree.id());
4501
4502 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4503 project.update(cx, |project, cx| {
4504 let tree = project.worktrees(cx).next().unwrap();
4505 tree.read(cx)
4506 .entry_for_path(rel_path(path))
4507 .unwrap_or_else(|| panic!("no entry for path {}", path))
4508 .id
4509 })
4510 };
4511
4512 let dir_id = id_for_path("a", cx);
4513 let file_id = id_for_path("a/file1", cx);
4514 let buffer = project
4515 .update(cx, |p, cx| {
4516 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4517 })
4518 .await
4519 .unwrap();
4520 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4521
4522 project
4523 .update(cx, |project, cx| {
4524 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4525 })
4526 .unwrap()
4527 .await
4528 .into_included()
4529 .unwrap();
4530 cx.executor().run_until_parked();
4531
4532 assert_eq!(id_for_path("b", cx), dir_id);
4533 assert_eq!(id_for_path("b/file1", cx), file_id);
4534 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4535}
4536
4537#[gpui::test]
4538async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4539 init_test(cx);
4540
4541 let fs = FakeFs::new(cx.executor());
4542 fs.insert_tree(
4543 "/dir",
4544 json!({
4545 "a.txt": "a-contents",
4546 "b.txt": "b-contents",
4547 }),
4548 )
4549 .await;
4550
4551 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4552
4553 // Spawn multiple tasks to open paths, repeating some paths.
4554 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4555 (
4556 p.open_local_buffer("/dir/a.txt", cx),
4557 p.open_local_buffer("/dir/b.txt", cx),
4558 p.open_local_buffer("/dir/a.txt", cx),
4559 )
4560 });
4561
4562 let buffer_a_1 = buffer_a_1.await.unwrap();
4563 let buffer_a_2 = buffer_a_2.await.unwrap();
4564 let buffer_b = buffer_b.await.unwrap();
4565 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4566 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4567
4568 // There is only one buffer per path.
4569 let buffer_a_id = buffer_a_1.entity_id();
4570 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4571
4572 // Open the same path again while it is still open.
4573 drop(buffer_a_1);
4574 let buffer_a_3 = project
4575 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4576 .await
4577 .unwrap();
4578
4579 // There's still only one buffer per path.
4580 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4581}
4582
4583#[gpui::test]
4584async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4585 init_test(cx);
4586
4587 let fs = FakeFs::new(cx.executor());
4588 fs.insert_tree(
4589 path!("/dir"),
4590 json!({
4591 "file1": "abc",
4592 "file2": "def",
4593 "file3": "ghi",
4594 }),
4595 )
4596 .await;
4597
4598 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4599
4600 let buffer1 = project
4601 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4602 .await
4603 .unwrap();
4604 let events = Arc::new(Mutex::new(Vec::new()));
4605
4606 // initially, the buffer isn't dirty.
4607 buffer1.update(cx, |buffer, cx| {
4608 cx.subscribe(&buffer1, {
4609 let events = events.clone();
4610 move |_, _, event, _| match event {
4611 BufferEvent::Operation { .. } => {}
4612 _ => events.lock().push(event.clone()),
4613 }
4614 })
4615 .detach();
4616
4617 assert!(!buffer.is_dirty());
4618 assert!(events.lock().is_empty());
4619
4620 buffer.edit([(1..2, "")], None, cx);
4621 });
4622
4623 // after the first edit, the buffer is dirty, and emits a dirtied event.
4624 buffer1.update(cx, |buffer, cx| {
4625 assert!(buffer.text() == "ac");
4626 assert!(buffer.is_dirty());
4627 assert_eq!(
4628 *events.lock(),
4629 &[
4630 language::BufferEvent::Edited,
4631 language::BufferEvent::DirtyChanged
4632 ]
4633 );
4634 events.lock().clear();
4635 buffer.did_save(
4636 buffer.version(),
4637 buffer.file().unwrap().disk_state().mtime(),
4638 cx,
4639 );
4640 });
4641
4642 // after saving, the buffer is not dirty, and emits a saved event.
4643 buffer1.update(cx, |buffer, cx| {
4644 assert!(!buffer.is_dirty());
4645 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4646 events.lock().clear();
4647
4648 buffer.edit([(1..1, "B")], None, cx);
4649 buffer.edit([(2..2, "D")], None, cx);
4650 });
4651
4652 // after editing again, the buffer is dirty, and emits another dirty event.
4653 buffer1.update(cx, |buffer, cx| {
4654 assert!(buffer.text() == "aBDc");
4655 assert!(buffer.is_dirty());
4656 assert_eq!(
4657 *events.lock(),
4658 &[
4659 language::BufferEvent::Edited,
4660 language::BufferEvent::DirtyChanged,
4661 language::BufferEvent::Edited,
4662 ],
4663 );
4664 events.lock().clear();
4665
4666 // After restoring the buffer to its previously-saved state,
4667 // the buffer is not considered dirty anymore.
4668 buffer.edit([(1..3, "")], None, cx);
4669 assert!(buffer.text() == "ac");
4670 assert!(!buffer.is_dirty());
4671 });
4672
4673 assert_eq!(
4674 *events.lock(),
4675 &[
4676 language::BufferEvent::Edited,
4677 language::BufferEvent::DirtyChanged
4678 ]
4679 );
4680
4681 // When a file is deleted, it is not considered dirty.
4682 let events = Arc::new(Mutex::new(Vec::new()));
4683 let buffer2 = project
4684 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4685 .await
4686 .unwrap();
4687 buffer2.update(cx, |_, cx| {
4688 cx.subscribe(&buffer2, {
4689 let events = events.clone();
4690 move |_, _, event, _| match event {
4691 BufferEvent::Operation { .. } => {}
4692 _ => events.lock().push(event.clone()),
4693 }
4694 })
4695 .detach();
4696 });
4697
4698 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4699 .await
4700 .unwrap();
4701 cx.executor().run_until_parked();
4702 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4703 assert_eq!(
4704 mem::take(&mut *events.lock()),
4705 &[language::BufferEvent::FileHandleChanged]
4706 );
4707
4708 // Buffer becomes dirty when edited.
4709 buffer2.update(cx, |buffer, cx| {
4710 buffer.edit([(2..3, "")], None, cx);
4711 assert_eq!(buffer.is_dirty(), true);
4712 });
4713 assert_eq!(
4714 mem::take(&mut *events.lock()),
4715 &[
4716 language::BufferEvent::Edited,
4717 language::BufferEvent::DirtyChanged
4718 ]
4719 );
4720
4721 // Buffer becomes clean again when all of its content is removed, because
4722 // the file was deleted.
4723 buffer2.update(cx, |buffer, cx| {
4724 buffer.edit([(0..2, "")], None, cx);
4725 assert_eq!(buffer.is_empty(), true);
4726 assert_eq!(buffer.is_dirty(), false);
4727 });
4728 assert_eq!(
4729 *events.lock(),
4730 &[
4731 language::BufferEvent::Edited,
4732 language::BufferEvent::DirtyChanged
4733 ]
4734 );
4735
4736 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4737 let events = Arc::new(Mutex::new(Vec::new()));
4738 let buffer3 = project
4739 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4740 .await
4741 .unwrap();
4742 buffer3.update(cx, |_, cx| {
4743 cx.subscribe(&buffer3, {
4744 let events = events.clone();
4745 move |_, _, event, _| match event {
4746 BufferEvent::Operation { .. } => {}
4747 _ => events.lock().push(event.clone()),
4748 }
4749 })
4750 .detach();
4751 });
4752
4753 buffer3.update(cx, |buffer, cx| {
4754 buffer.edit([(0..0, "x")], None, cx);
4755 });
4756 events.lock().clear();
4757 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4758 .await
4759 .unwrap();
4760 cx.executor().run_until_parked();
4761 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4762 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4763}
4764
// Verifies how an open buffer reacts to its backing file changing on disk:
// a clean buffer is reloaded in place (preserving anchors across the diff),
// while a dirty buffer keeps its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The "ˇ" markers yield offsets at which anchors will be created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Anchor at each marked offset so we can check that anchors track their
    // logical positions through the on-disk reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should now resolve to the corresponding marked offset
        // in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4847
4848#[gpui::test]
4849async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4850 init_test(cx);
4851
4852 let fs = FakeFs::new(cx.executor());
4853 fs.insert_tree(
4854 path!("/dir"),
4855 json!({
4856 "file1": "a\nb\nc\n",
4857 "file2": "one\r\ntwo\r\nthree\r\n",
4858 }),
4859 )
4860 .await;
4861
4862 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4863 let buffer1 = project
4864 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4865 .await
4866 .unwrap();
4867 let buffer2 = project
4868 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4869 .await
4870 .unwrap();
4871
4872 buffer1.update(cx, |buffer, _| {
4873 assert_eq!(buffer.text(), "a\nb\nc\n");
4874 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4875 });
4876 buffer2.update(cx, |buffer, _| {
4877 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4878 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4879 });
4880
4881 // Change a file's line endings on disk from unix to windows. The buffer's
4882 // state updates correctly.
4883 fs.save(
4884 path!("/dir/file1").as_ref(),
4885 &"aaa\nb\nc\n".into(),
4886 LineEnding::Windows,
4887 )
4888 .await
4889 .unwrap();
4890 cx.executor().run_until_parked();
4891 buffer1.update(cx, |buffer, _| {
4892 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4893 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4894 });
4895
4896 // Save a file with windows line endings. The file is written correctly.
4897 buffer2.update(cx, |buffer, cx| {
4898 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4899 });
4900 project
4901 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4902 .await
4903 .unwrap();
4904 assert_eq!(
4905 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4906 "one\r\ntwo\r\nthree\r\nfour\r\n",
4907 );
4908}
4909
// Verifies that pushed LSP diagnostics are grouped: each primary diagnostic
// and the hint diagnostics tied to it via `related_information` share a
// `group_id`, and `diagnostic_group` returns exactly the members of a group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publish-diagnostics message containing two logical groups:
    // - "error 1" (warning) with one hint, cross-linked via
    //   `related_information`;
    // - "error 2" (error) with two hints, likewise cross-linked.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    // Hints point back at their primary diagnostic.
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries over the full buffer, ordered by range. "error 2" and its
    // hints share group 0; "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5169
// Verifies that renaming a worktree entry sends the LSP file-operation
// messages: `workspace/willRenameFiles` (a request, whose returned workspace
// edit is applied) followed by the `workspace/didRenameFiles` notification,
// when the server registered matching file-operation filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register filters covering all .rs files and all folders, so the rename
    // below matches the server's file-operation capabilities.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts up.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the test checks
    // that the project resolves exactly this edit.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set from inside the request handler to prove willRenameFiles was sent
    // with the expected old/new URIs before the rename completed.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive didRenameFiles with
    // the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5305
// Verifies the two-phase LSP symbol rename: `textDocument/prepareRename`
// returns the renameable range, then `textDocument/rename` returns a
// workspace edit which the project applies across multiple buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepareRename support so the prepare phase
                    // goes through the server.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare the rename at offset 7 (inside "ONE").
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's LSP range should map back to buffer offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename; the server answers with edits touching
    // both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction should contain both affected buffers with the
    // edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5445
5446#[gpui::test]
5447async fn test_search(cx: &mut gpui::TestAppContext) {
5448 init_test(cx);
5449
5450 let fs = FakeFs::new(cx.executor());
5451 fs.insert_tree(
5452 path!("/dir"),
5453 json!({
5454 "one.rs": "const ONE: usize = 1;",
5455 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5456 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5457 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5458 }),
5459 )
5460 .await;
5461 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5462 assert_eq!(
5463 search(
5464 &project,
5465 SearchQuery::text(
5466 "TWO",
5467 false,
5468 true,
5469 false,
5470 Default::default(),
5471 Default::default(),
5472 false,
5473 None
5474 )
5475 .unwrap(),
5476 cx
5477 )
5478 .await
5479 .unwrap(),
5480 HashMap::from_iter([
5481 (path!("dir/two.rs").to_string(), vec![6..9]),
5482 (path!("dir/three.rs").to_string(), vec![37..40])
5483 ])
5484 );
5485
5486 let buffer_4 = project
5487 .update(cx, |project, cx| {
5488 project.open_local_buffer(path!("/dir/four.rs"), cx)
5489 })
5490 .await
5491 .unwrap();
5492 buffer_4.update(cx, |buffer, cx| {
5493 let text = "two::TWO";
5494 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5495 });
5496
5497 assert_eq!(
5498 search(
5499 &project,
5500 SearchQuery::text(
5501 "TWO",
5502 false,
5503 true,
5504 false,
5505 Default::default(),
5506 Default::default(),
5507 false,
5508 None,
5509 )
5510 .unwrap(),
5511 cx
5512 )
5513 .await
5514 .unwrap(),
5515 HashMap::from_iter([
5516 (path!("dir/two.rs").to_string(), vec![6..9]),
5517 (path!("dir/three.rs").to_string(), vec![37..40]),
5518 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5519 ])
5520 );
5521}
5522
5523#[gpui::test]
5524async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5525 init_test(cx);
5526
5527 let search_query = "file";
5528
5529 let fs = FakeFs::new(cx.executor());
5530 fs.insert_tree(
5531 path!("/dir"),
5532 json!({
5533 "one.rs": r#"// Rust file one"#,
5534 "one.ts": r#"// TypeScript file one"#,
5535 "two.rs": r#"// Rust file two"#,
5536 "two.ts": r#"// TypeScript file two"#,
5537 }),
5538 )
5539 .await;
5540 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5541
5542 assert!(
5543 search(
5544 &project,
5545 SearchQuery::text(
5546 search_query,
5547 false,
5548 true,
5549 false,
5550 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5551 Default::default(),
5552 false,
5553 None
5554 )
5555 .unwrap(),
5556 cx
5557 )
5558 .await
5559 .unwrap()
5560 .is_empty(),
5561 "If no inclusions match, no files should be returned"
5562 );
5563
5564 assert_eq!(
5565 search(
5566 &project,
5567 SearchQuery::text(
5568 search_query,
5569 false,
5570 true,
5571 false,
5572 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5573 Default::default(),
5574 false,
5575 None
5576 )
5577 .unwrap(),
5578 cx
5579 )
5580 .await
5581 .unwrap(),
5582 HashMap::from_iter([
5583 (path!("dir/one.rs").to_string(), vec![8..12]),
5584 (path!("dir/two.rs").to_string(), vec![8..12]),
5585 ]),
5586 "Rust only search should give only Rust files"
5587 );
5588
5589 assert_eq!(
5590 search(
5591 &project,
5592 SearchQuery::text(
5593 search_query,
5594 false,
5595 true,
5596 false,
5597 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5598 .unwrap(),
5599 Default::default(),
5600 false,
5601 None,
5602 )
5603 .unwrap(),
5604 cx
5605 )
5606 .await
5607 .unwrap(),
5608 HashMap::from_iter([
5609 (path!("dir/one.ts").to_string(), vec![14..18]),
5610 (path!("dir/two.ts").to_string(), vec![14..18]),
5611 ]),
5612 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5613 );
5614
5615 assert_eq!(
5616 search(
5617 &project,
5618 SearchQuery::text(
5619 search_query,
5620 false,
5621 true,
5622 false,
5623 PathMatcher::new(
5624 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5625 PathStyle::local()
5626 )
5627 .unwrap(),
5628 Default::default(),
5629 false,
5630 None,
5631 )
5632 .unwrap(),
5633 cx
5634 )
5635 .await
5636 .unwrap(),
5637 HashMap::from_iter([
5638 (path!("dir/two.ts").to_string(), vec![14..18]),
5639 (path!("dir/one.rs").to_string(), vec![8..12]),
5640 (path!("dir/one.ts").to_string(), vec![14..18]),
5641 (path!("dir/two.rs").to_string(), vec![8..12]),
5642 ]),
5643 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5644 );
5645}
5646
5647#[gpui::test]
5648async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5649 init_test(cx);
5650
5651 let search_query = "file";
5652
5653 let fs = FakeFs::new(cx.executor());
5654 fs.insert_tree(
5655 path!("/dir"),
5656 json!({
5657 "one.rs": r#"// Rust file one"#,
5658 "one.ts": r#"// TypeScript file one"#,
5659 "two.rs": r#"// Rust file two"#,
5660 "two.ts": r#"// TypeScript file two"#,
5661 }),
5662 )
5663 .await;
5664 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5665
5666 assert_eq!(
5667 search(
5668 &project,
5669 SearchQuery::text(
5670 search_query,
5671 false,
5672 true,
5673 false,
5674 Default::default(),
5675 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5676 false,
5677 None,
5678 )
5679 .unwrap(),
5680 cx
5681 )
5682 .await
5683 .unwrap(),
5684 HashMap::from_iter([
5685 (path!("dir/one.rs").to_string(), vec![8..12]),
5686 (path!("dir/one.ts").to_string(), vec![14..18]),
5687 (path!("dir/two.rs").to_string(), vec![8..12]),
5688 (path!("dir/two.ts").to_string(), vec![14..18]),
5689 ]),
5690 "If no exclusions match, all files should be returned"
5691 );
5692
5693 assert_eq!(
5694 search(
5695 &project,
5696 SearchQuery::text(
5697 search_query,
5698 false,
5699 true,
5700 false,
5701 Default::default(),
5702 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5703 false,
5704 None,
5705 )
5706 .unwrap(),
5707 cx
5708 )
5709 .await
5710 .unwrap(),
5711 HashMap::from_iter([
5712 (path!("dir/one.ts").to_string(), vec![14..18]),
5713 (path!("dir/two.ts").to_string(), vec![14..18]),
5714 ]),
5715 "Rust exclusion search should give only TypeScript files"
5716 );
5717
5718 assert_eq!(
5719 search(
5720 &project,
5721 SearchQuery::text(
5722 search_query,
5723 false,
5724 true,
5725 false,
5726 Default::default(),
5727 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5728 .unwrap(),
5729 false,
5730 None,
5731 )
5732 .unwrap(),
5733 cx
5734 )
5735 .await
5736 .unwrap(),
5737 HashMap::from_iter([
5738 (path!("dir/one.rs").to_string(), vec![8..12]),
5739 (path!("dir/two.rs").to_string(), vec![8..12]),
5740 ]),
5741 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5742 );
5743
5744 assert!(
5745 search(
5746 &project,
5747 SearchQuery::text(
5748 search_query,
5749 false,
5750 true,
5751 false,
5752 Default::default(),
5753 PathMatcher::new(
5754 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5755 PathStyle::local(),
5756 )
5757 .unwrap(),
5758 false,
5759 None,
5760 )
5761 .unwrap(),
5762 cx
5763 )
5764 .await
5765 .unwrap()
5766 .is_empty(),
5767 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5768 );
5769}
5770
5771#[gpui::test]
5772async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5773 init_test(cx);
5774
5775 let search_query = "file";
5776
5777 let fs = FakeFs::new(cx.executor());
5778 fs.insert_tree(
5779 path!("/dir"),
5780 json!({
5781 "one.rs": r#"// Rust file one"#,
5782 "one.ts": r#"// TypeScript file one"#,
5783 "two.rs": r#"// Rust file two"#,
5784 "two.ts": r#"// TypeScript file two"#,
5785 }),
5786 )
5787 .await;
5788
5789 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5790 let path_style = PathStyle::local();
5791 let _buffer = project.update(cx, |project, cx| {
5792 project.create_local_buffer("file", None, false, cx)
5793 });
5794
5795 assert_eq!(
5796 search(
5797 &project,
5798 SearchQuery::text(
5799 search_query,
5800 false,
5801 true,
5802 false,
5803 Default::default(),
5804 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5805 false,
5806 None,
5807 )
5808 .unwrap(),
5809 cx
5810 )
5811 .await
5812 .unwrap(),
5813 HashMap::from_iter([
5814 (path!("dir/one.rs").to_string(), vec![8..12]),
5815 (path!("dir/one.ts").to_string(), vec![14..18]),
5816 (path!("dir/two.rs").to_string(), vec![8..12]),
5817 (path!("dir/two.ts").to_string(), vec![14..18]),
5818 ]),
5819 "If no exclusions match, all files should be returned"
5820 );
5821
5822 assert_eq!(
5823 search(
5824 &project,
5825 SearchQuery::text(
5826 search_query,
5827 false,
5828 true,
5829 false,
5830 Default::default(),
5831 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5832 false,
5833 None,
5834 )
5835 .unwrap(),
5836 cx
5837 )
5838 .await
5839 .unwrap(),
5840 HashMap::from_iter([
5841 (path!("dir/one.ts").to_string(), vec![14..18]),
5842 (path!("dir/two.ts").to_string(), vec![14..18]),
5843 ]),
5844 "Rust exclusion search should give only TypeScript files"
5845 );
5846
5847 assert_eq!(
5848 search(
5849 &project,
5850 SearchQuery::text(
5851 search_query,
5852 false,
5853 true,
5854 false,
5855 Default::default(),
5856 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5857 false,
5858 None,
5859 )
5860 .unwrap(),
5861 cx
5862 )
5863 .await
5864 .unwrap(),
5865 HashMap::from_iter([
5866 (path!("dir/one.rs").to_string(), vec![8..12]),
5867 (path!("dir/two.rs").to_string(), vec![8..12]),
5868 ]),
5869 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5870 );
5871
5872 assert!(
5873 search(
5874 &project,
5875 SearchQuery::text(
5876 search_query,
5877 false,
5878 true,
5879 false,
5880 Default::default(),
5881 PathMatcher::new(
5882 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5883 PathStyle::local(),
5884 )
5885 .unwrap(),
5886 false,
5887 None,
5888 )
5889 .unwrap(),
5890 cx
5891 )
5892 .await
5893 .unwrap()
5894 .is_empty(),
5895 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5896 );
5897}
5898
5899#[gpui::test]
5900async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5901 init_test(cx);
5902
5903 let search_query = "file";
5904
5905 let fs = FakeFs::new(cx.executor());
5906 fs.insert_tree(
5907 path!("/dir"),
5908 json!({
5909 "one.rs": r#"// Rust file one"#,
5910 "one.ts": r#"// TypeScript file one"#,
5911 "two.rs": r#"// Rust file two"#,
5912 "two.ts": r#"// TypeScript file two"#,
5913 }),
5914 )
5915 .await;
5916 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5917 assert!(
5918 search(
5919 &project,
5920 SearchQuery::text(
5921 search_query,
5922 false,
5923 true,
5924 false,
5925 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5926 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5927 false,
5928 None,
5929 )
5930 .unwrap(),
5931 cx
5932 )
5933 .await
5934 .unwrap()
5935 .is_empty(),
5936 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5937 );
5938
5939 assert!(
5940 search(
5941 &project,
5942 SearchQuery::text(
5943 search_query,
5944 false,
5945 true,
5946 false,
5947 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5948 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5949 false,
5950 None,
5951 )
5952 .unwrap(),
5953 cx
5954 )
5955 .await
5956 .unwrap()
5957 .is_empty(),
5958 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5959 );
5960
5961 assert!(
5962 search(
5963 &project,
5964 SearchQuery::text(
5965 search_query,
5966 false,
5967 true,
5968 false,
5969 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5970 .unwrap(),
5971 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5972 .unwrap(),
5973 false,
5974 None,
5975 )
5976 .unwrap(),
5977 cx
5978 )
5979 .await
5980 .unwrap()
5981 .is_empty(),
5982 "Non-matching inclusions and exclusions should not change that."
5983 );
5984
5985 assert_eq!(
5986 search(
5987 &project,
5988 SearchQuery::text(
5989 search_query,
5990 false,
5991 true,
5992 false,
5993 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5994 .unwrap(),
5995 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5996 .unwrap(),
5997 false,
5998 None,
5999 )
6000 .unwrap(),
6001 cx
6002 )
6003 .await
6004 .unwrap(),
6005 HashMap::from_iter([
6006 (path!("dir/one.ts").to_string(), vec![14..18]),
6007 (path!("dir/two.ts").to_string(), vec![14..18]),
6008 ]),
6009 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6010 );
6011}
6012
6013#[gpui::test]
6014async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6015 init_test(cx);
6016
6017 let fs = FakeFs::new(cx.executor());
6018 fs.insert_tree(
6019 path!("/worktree-a"),
6020 json!({
6021 "haystack.rs": r#"// NEEDLE"#,
6022 "haystack.ts": r#"// NEEDLE"#,
6023 }),
6024 )
6025 .await;
6026 fs.insert_tree(
6027 path!("/worktree-b"),
6028 json!({
6029 "haystack.rs": r#"// NEEDLE"#,
6030 "haystack.ts": r#"// NEEDLE"#,
6031 }),
6032 )
6033 .await;
6034
6035 let path_style = PathStyle::local();
6036 let project = Project::test(
6037 fs.clone(),
6038 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6039 cx,
6040 )
6041 .await;
6042
6043 assert_eq!(
6044 search(
6045 &project,
6046 SearchQuery::text(
6047 "NEEDLE",
6048 false,
6049 true,
6050 false,
6051 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6052 Default::default(),
6053 true,
6054 None,
6055 )
6056 .unwrap(),
6057 cx
6058 )
6059 .await
6060 .unwrap(),
6061 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6062 "should only return results from included worktree"
6063 );
6064 assert_eq!(
6065 search(
6066 &project,
6067 SearchQuery::text(
6068 "NEEDLE",
6069 false,
6070 true,
6071 false,
6072 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6073 Default::default(),
6074 true,
6075 None,
6076 )
6077 .unwrap(),
6078 cx
6079 )
6080 .await
6081 .unwrap(),
6082 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6083 "should only return results from included worktree"
6084 );
6085
6086 assert_eq!(
6087 search(
6088 &project,
6089 SearchQuery::text(
6090 "NEEDLE",
6091 false,
6092 true,
6093 false,
6094 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6095 Default::default(),
6096 false,
6097 None,
6098 )
6099 .unwrap(),
6100 cx
6101 )
6102 .await
6103 .unwrap(),
6104 HashMap::from_iter([
6105 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6106 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6107 ]),
6108 "should return results from both worktrees"
6109 );
6110}
6111
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // A repo-like tree where "target" and "node_modules" are gitignored.
    // Every file contains the substring "key" somewhere, so the results show
    // exactly which files each query was willing to visit.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Fourth `SearchQuery::text` argument is `false` here and `true` below;
    // comparing the two asserts shows it toggles searching ignored entries.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each query so every search starts from
    // the same clean state.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Combine ignored-dir search with both an inclusion (only the prettier
    // subtree) and an exclusion (no TypeScript files).
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6236
6237#[gpui::test]
6238async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6239 init_test(cx);
6240
6241 let fs = FakeFs::new(cx.executor());
6242 fs.insert_tree(
6243 path!("/dir"),
6244 json!({
6245 "one.rs": "// ПРИВЕТ? привет!",
6246 "two.rs": "// ПРИВЕТ.",
6247 "three.rs": "// привет",
6248 }),
6249 )
6250 .await;
6251 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6252 let unicode_case_sensitive_query = SearchQuery::text(
6253 "привет",
6254 false,
6255 true,
6256 false,
6257 Default::default(),
6258 Default::default(),
6259 false,
6260 None,
6261 );
6262 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6263 assert_eq!(
6264 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6265 .await
6266 .unwrap(),
6267 HashMap::from_iter([
6268 (path!("dir/one.rs").to_string(), vec![17..29]),
6269 (path!("dir/three.rs").to_string(), vec![3..15]),
6270 ])
6271 );
6272
6273 let unicode_case_insensitive_query = SearchQuery::text(
6274 "привет",
6275 false,
6276 false,
6277 false,
6278 Default::default(),
6279 Default::default(),
6280 false,
6281 None,
6282 );
6283 assert_matches!(
6284 unicode_case_insensitive_query,
6285 Ok(SearchQuery::Regex { .. })
6286 );
6287 assert_eq!(
6288 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6289 .await
6290 .unwrap(),
6291 HashMap::from_iter([
6292 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6293 (path!("dir/two.rs").to_string(), vec![3..15]),
6294 (path!("dir/three.rs").to_string(), vec![3..15]),
6295 ])
6296 );
6297
6298 assert_eq!(
6299 search(
6300 &project,
6301 SearchQuery::text(
6302 "привет.",
6303 false,
6304 false,
6305 false,
6306 Default::default(),
6307 Default::default(),
6308 false,
6309 None,
6310 )
6311 .unwrap(),
6312 cx
6313 )
6314 .await
6315 .unwrap(),
6316 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6317 );
6318}
6319
6320#[gpui::test]
6321async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6322 init_test(cx);
6323
6324 let fs = FakeFs::new(cx.executor());
6325 fs.insert_tree(
6326 "/one/two",
6327 json!({
6328 "three": {
6329 "a.txt": "",
6330 "four": {}
6331 },
6332 "c.rs": ""
6333 }),
6334 )
6335 .await;
6336
6337 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6338 project
6339 .update(cx, |project, cx| {
6340 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6341 project.create_entry((id, rel_path("b..")), true, cx)
6342 })
6343 .await
6344 .unwrap()
6345 .into_included()
6346 .unwrap();
6347
6348 assert_eq!(
6349 fs.paths(true),
6350 vec![
6351 PathBuf::from(path!("/")),
6352 PathBuf::from(path!("/one")),
6353 PathBuf::from(path!("/one/two")),
6354 PathBuf::from(path!("/one/two/c.rs")),
6355 PathBuf::from(path!("/one/two/three")),
6356 PathBuf::from(path!("/one/two/three/a.txt")),
6357 PathBuf::from(path!("/one/two/three/b..")),
6358 PathBuf::from(path!("/one/two/three/four")),
6359 ]
6360 );
6361}
6362
6363#[gpui::test]
6364async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6365 init_test(cx);
6366
6367 let fs = FakeFs::new(cx.executor());
6368 fs.insert_tree(
6369 path!("/dir"),
6370 json!({
6371 "a.tsx": "a",
6372 }),
6373 )
6374 .await;
6375
6376 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6377
6378 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6379 language_registry.add(tsx_lang());
6380 let language_server_names = [
6381 "TypeScriptServer",
6382 "TailwindServer",
6383 "ESLintServer",
6384 "NoHoverCapabilitiesServer",
6385 ];
6386 let mut language_servers = [
6387 language_registry.register_fake_lsp(
6388 "tsx",
6389 FakeLspAdapter {
6390 name: language_server_names[0],
6391 capabilities: lsp::ServerCapabilities {
6392 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6393 ..lsp::ServerCapabilities::default()
6394 },
6395 ..FakeLspAdapter::default()
6396 },
6397 ),
6398 language_registry.register_fake_lsp(
6399 "tsx",
6400 FakeLspAdapter {
6401 name: language_server_names[1],
6402 capabilities: lsp::ServerCapabilities {
6403 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6404 ..lsp::ServerCapabilities::default()
6405 },
6406 ..FakeLspAdapter::default()
6407 },
6408 ),
6409 language_registry.register_fake_lsp(
6410 "tsx",
6411 FakeLspAdapter {
6412 name: language_server_names[2],
6413 capabilities: lsp::ServerCapabilities {
6414 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6415 ..lsp::ServerCapabilities::default()
6416 },
6417 ..FakeLspAdapter::default()
6418 },
6419 ),
6420 language_registry.register_fake_lsp(
6421 "tsx",
6422 FakeLspAdapter {
6423 name: language_server_names[3],
6424 capabilities: lsp::ServerCapabilities {
6425 hover_provider: None,
6426 ..lsp::ServerCapabilities::default()
6427 },
6428 ..FakeLspAdapter::default()
6429 },
6430 ),
6431 ];
6432
6433 let (buffer, _handle) = project
6434 .update(cx, |p, cx| {
6435 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6436 })
6437 .await
6438 .unwrap();
6439 cx.executor().run_until_parked();
6440
6441 let mut servers_with_hover_requests = HashMap::default();
6442 for i in 0..language_server_names.len() {
6443 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6444 panic!(
6445 "Failed to get language server #{i} with name {}",
6446 &language_server_names[i]
6447 )
6448 });
6449 let new_server_name = new_server.server.name();
6450 assert!(
6451 !servers_with_hover_requests.contains_key(&new_server_name),
6452 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6453 );
6454 match new_server_name.as_ref() {
6455 "TailwindServer" | "TypeScriptServer" => {
6456 servers_with_hover_requests.insert(
6457 new_server_name.clone(),
6458 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6459 move |_, _| {
6460 let name = new_server_name.clone();
6461 async move {
6462 Ok(Some(lsp::Hover {
6463 contents: lsp::HoverContents::Scalar(
6464 lsp::MarkedString::String(format!("{name} hover")),
6465 ),
6466 range: None,
6467 }))
6468 }
6469 },
6470 ),
6471 );
6472 }
6473 "ESLintServer" => {
6474 servers_with_hover_requests.insert(
6475 new_server_name,
6476 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6477 |_, _| async move { Ok(None) },
6478 ),
6479 );
6480 }
6481 "NoHoverCapabilitiesServer" => {
6482 let _never_handled = new_server
6483 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6484 panic!(
6485 "Should not call for hovers server with no corresponding capabilities"
6486 )
6487 });
6488 }
6489 unexpected => panic!("Unexpected server name: {unexpected}"),
6490 }
6491 }
6492
6493 let hover_task = project.update(cx, |project, cx| {
6494 project.hover(&buffer, Point::new(0, 0), cx)
6495 });
6496 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6497 |mut hover_request| async move {
6498 hover_request
6499 .next()
6500 .await
6501 .expect("All hover requests should have been triggered")
6502 },
6503 ))
6504 .await;
6505 assert_eq!(
6506 vec!["TailwindServer hover", "TypeScriptServer hover"],
6507 hover_task
6508 .await
6509 .into_iter()
6510 .flatten()
6511 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6512 .sorted()
6513 .collect::<Vec<_>>(),
6514 "Should receive hover responses from all related servers with hover capabilities"
6515 );
6516}
6517
6518#[gpui::test]
6519async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6520 init_test(cx);
6521
6522 let fs = FakeFs::new(cx.executor());
6523 fs.insert_tree(
6524 path!("/dir"),
6525 json!({
6526 "a.ts": "a",
6527 }),
6528 )
6529 .await;
6530
6531 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6532
6533 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6534 language_registry.add(typescript_lang());
6535 let mut fake_language_servers = language_registry.register_fake_lsp(
6536 "TypeScript",
6537 FakeLspAdapter {
6538 capabilities: lsp::ServerCapabilities {
6539 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6540 ..lsp::ServerCapabilities::default()
6541 },
6542 ..FakeLspAdapter::default()
6543 },
6544 );
6545
6546 let (buffer, _handle) = project
6547 .update(cx, |p, cx| {
6548 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6549 })
6550 .await
6551 .unwrap();
6552 cx.executor().run_until_parked();
6553
6554 let fake_server = fake_language_servers
6555 .next()
6556 .await
6557 .expect("failed to get the language server");
6558
6559 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6560 move |_, _| async move {
6561 Ok(Some(lsp::Hover {
6562 contents: lsp::HoverContents::Array(vec![
6563 lsp::MarkedString::String("".to_string()),
6564 lsp::MarkedString::String(" ".to_string()),
6565 lsp::MarkedString::String("\n\n\n".to_string()),
6566 ]),
6567 range: None,
6568 }))
6569 },
6570 );
6571
6572 let hover_task = project.update(cx, |project, cx| {
6573 project.hover(&buffer, Point::new(0, 0), cx)
6574 });
6575 let () = request_handled
6576 .next()
6577 .await
6578 .expect("All hover requests should have been triggered");
6579 assert_eq!(
6580 Vec::<String>::new(),
6581 hover_task
6582 .await
6583 .into_iter()
6584 .flatten()
6585 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6586 .sorted()
6587 .collect::<Vec<_>>(),
6588 "Empty hover parts should be ignored"
6589 );
6590}
6591
6592#[gpui::test]
6593async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6594 init_test(cx);
6595
6596 let fs = FakeFs::new(cx.executor());
6597 fs.insert_tree(
6598 path!("/dir"),
6599 json!({
6600 "a.ts": "a",
6601 }),
6602 )
6603 .await;
6604
6605 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6606
6607 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6608 language_registry.add(typescript_lang());
6609 let mut fake_language_servers = language_registry.register_fake_lsp(
6610 "TypeScript",
6611 FakeLspAdapter {
6612 capabilities: lsp::ServerCapabilities {
6613 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6614 ..lsp::ServerCapabilities::default()
6615 },
6616 ..FakeLspAdapter::default()
6617 },
6618 );
6619
6620 let (buffer, _handle) = project
6621 .update(cx, |p, cx| {
6622 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6623 })
6624 .await
6625 .unwrap();
6626 cx.executor().run_until_parked();
6627
6628 let fake_server = fake_language_servers
6629 .next()
6630 .await
6631 .expect("failed to get the language server");
6632
6633 let mut request_handled = fake_server
6634 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6635 Ok(Some(vec![
6636 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6637 title: "organize imports".to_string(),
6638 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6639 ..lsp::CodeAction::default()
6640 }),
6641 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6642 title: "fix code".to_string(),
6643 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6644 ..lsp::CodeAction::default()
6645 }),
6646 ]))
6647 });
6648
6649 let code_actions_task = project.update(cx, |project, cx| {
6650 project.code_actions(
6651 &buffer,
6652 0..buffer.read(cx).len(),
6653 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6654 cx,
6655 )
6656 });
6657
6658 let () = request_handled
6659 .next()
6660 .await
6661 .expect("The code action request should have been triggered");
6662
6663 let code_actions = code_actions_task.await.unwrap().unwrap();
6664 assert_eq!(code_actions.len(), 1);
6665 assert_eq!(
6666 code_actions[0].lsp_action.action_kind(),
6667 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6668 );
6669}
6670
6671#[gpui::test]
6672async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6673 init_test(cx);
6674
6675 let fs = FakeFs::new(cx.executor());
6676 fs.insert_tree(
6677 path!("/dir"),
6678 json!({
6679 "a.tsx": "a",
6680 }),
6681 )
6682 .await;
6683
6684 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6685
6686 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6687 language_registry.add(tsx_lang());
6688 let language_server_names = [
6689 "TypeScriptServer",
6690 "TailwindServer",
6691 "ESLintServer",
6692 "NoActionsCapabilitiesServer",
6693 ];
6694
6695 let mut language_server_rxs = [
6696 language_registry.register_fake_lsp(
6697 "tsx",
6698 FakeLspAdapter {
6699 name: language_server_names[0],
6700 capabilities: lsp::ServerCapabilities {
6701 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6702 ..lsp::ServerCapabilities::default()
6703 },
6704 ..FakeLspAdapter::default()
6705 },
6706 ),
6707 language_registry.register_fake_lsp(
6708 "tsx",
6709 FakeLspAdapter {
6710 name: language_server_names[1],
6711 capabilities: lsp::ServerCapabilities {
6712 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6713 ..lsp::ServerCapabilities::default()
6714 },
6715 ..FakeLspAdapter::default()
6716 },
6717 ),
6718 language_registry.register_fake_lsp(
6719 "tsx",
6720 FakeLspAdapter {
6721 name: language_server_names[2],
6722 capabilities: lsp::ServerCapabilities {
6723 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6724 ..lsp::ServerCapabilities::default()
6725 },
6726 ..FakeLspAdapter::default()
6727 },
6728 ),
6729 language_registry.register_fake_lsp(
6730 "tsx",
6731 FakeLspAdapter {
6732 name: language_server_names[3],
6733 capabilities: lsp::ServerCapabilities {
6734 code_action_provider: None,
6735 ..lsp::ServerCapabilities::default()
6736 },
6737 ..FakeLspAdapter::default()
6738 },
6739 ),
6740 ];
6741
6742 let (buffer, _handle) = project
6743 .update(cx, |p, cx| {
6744 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6745 })
6746 .await
6747 .unwrap();
6748 cx.executor().run_until_parked();
6749
6750 let mut servers_with_actions_requests = HashMap::default();
6751 for i in 0..language_server_names.len() {
6752 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6753 panic!(
6754 "Failed to get language server #{i} with name {}",
6755 &language_server_names[i]
6756 )
6757 });
6758 let new_server_name = new_server.server.name();
6759
6760 assert!(
6761 !servers_with_actions_requests.contains_key(&new_server_name),
6762 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6763 );
6764 match new_server_name.0.as_ref() {
6765 "TailwindServer" | "TypeScriptServer" => {
6766 servers_with_actions_requests.insert(
6767 new_server_name.clone(),
6768 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6769 move |_, _| {
6770 let name = new_server_name.clone();
6771 async move {
6772 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6773 lsp::CodeAction {
6774 title: format!("{name} code action"),
6775 ..lsp::CodeAction::default()
6776 },
6777 )]))
6778 }
6779 },
6780 ),
6781 );
6782 }
6783 "ESLintServer" => {
6784 servers_with_actions_requests.insert(
6785 new_server_name,
6786 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6787 |_, _| async move { Ok(None) },
6788 ),
6789 );
6790 }
6791 "NoActionsCapabilitiesServer" => {
6792 let _never_handled = new_server
6793 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6794 panic!(
6795 "Should not call for code actions server with no corresponding capabilities"
6796 )
6797 });
6798 }
6799 unexpected => panic!("Unexpected server name: {unexpected}"),
6800 }
6801 }
6802
6803 let code_actions_task = project.update(cx, |project, cx| {
6804 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6805 });
6806
6807 // cx.run_until_parked();
6808 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6809 |mut code_actions_request| async move {
6810 code_actions_request
6811 .next()
6812 .await
6813 .expect("All code actions requests should have been triggered")
6814 },
6815 ))
6816 .await;
6817 assert_eq!(
6818 vec!["TailwindServer code action", "TypeScriptServer code action"],
6819 code_actions_task
6820 .await
6821 .unwrap()
6822 .unwrap()
6823 .into_iter()
6824 .map(|code_action| code_action.lsp_action.title().to_owned())
6825 .sorted()
6826 .collect::<Vec<_>>(),
6827 "Should receive code actions responses from all related servers with hover capabilities"
6828 );
6829}
6830
6831#[gpui::test]
6832async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6833 init_test(cx);
6834
6835 let fs = FakeFs::new(cx.executor());
6836 fs.insert_tree(
6837 "/dir",
6838 json!({
6839 "a.rs": "let a = 1;",
6840 "b.rs": "let b = 2;",
6841 "c.rs": "let c = 2;",
6842 }),
6843 )
6844 .await;
6845
6846 let project = Project::test(
6847 fs,
6848 [
6849 "/dir/a.rs".as_ref(),
6850 "/dir/b.rs".as_ref(),
6851 "/dir/c.rs".as_ref(),
6852 ],
6853 cx,
6854 )
6855 .await;
6856
6857 // check the initial state and get the worktrees
6858 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6859 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6860 assert_eq!(worktrees.len(), 3);
6861
6862 let worktree_a = worktrees[0].read(cx);
6863 let worktree_b = worktrees[1].read(cx);
6864 let worktree_c = worktrees[2].read(cx);
6865
6866 // check they start in the right order
6867 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6868 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6869 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6870
6871 (
6872 worktrees[0].clone(),
6873 worktrees[1].clone(),
6874 worktrees[2].clone(),
6875 )
6876 });
6877
6878 // move first worktree to after the second
6879 // [a, b, c] -> [b, a, c]
6880 project
6881 .update(cx, |project, cx| {
6882 let first = worktree_a.read(cx);
6883 let second = worktree_b.read(cx);
6884 project.move_worktree(first.id(), second.id(), cx)
6885 })
6886 .expect("moving first after second");
6887
6888 // check the state after moving
6889 project.update(cx, |project, cx| {
6890 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6891 assert_eq!(worktrees.len(), 3);
6892
6893 let first = worktrees[0].read(cx);
6894 let second = worktrees[1].read(cx);
6895 let third = worktrees[2].read(cx);
6896
6897 // check they are now in the right order
6898 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6899 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6900 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6901 });
6902
6903 // move the second worktree to before the first
6904 // [b, a, c] -> [a, b, c]
6905 project
6906 .update(cx, |project, cx| {
6907 let second = worktree_a.read(cx);
6908 let first = worktree_b.read(cx);
6909 project.move_worktree(first.id(), second.id(), cx)
6910 })
6911 .expect("moving second before first");
6912
6913 // check the state after moving
6914 project.update(cx, |project, cx| {
6915 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6916 assert_eq!(worktrees.len(), 3);
6917
6918 let first = worktrees[0].read(cx);
6919 let second = worktrees[1].read(cx);
6920 let third = worktrees[2].read(cx);
6921
6922 // check they are now in the right order
6923 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6924 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6925 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6926 });
6927
6928 // move the second worktree to after the third
6929 // [a, b, c] -> [a, c, b]
6930 project
6931 .update(cx, |project, cx| {
6932 let second = worktree_b.read(cx);
6933 let third = worktree_c.read(cx);
6934 project.move_worktree(second.id(), third.id(), cx)
6935 })
6936 .expect("moving second after third");
6937
6938 // check the state after moving
6939 project.update(cx, |project, cx| {
6940 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6941 assert_eq!(worktrees.len(), 3);
6942
6943 let first = worktrees[0].read(cx);
6944 let second = worktrees[1].read(cx);
6945 let third = worktrees[2].read(cx);
6946
6947 // check they are now in the right order
6948 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6949 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6950 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6951 });
6952
6953 // move the third worktree to before the second
6954 // [a, c, b] -> [a, b, c]
6955 project
6956 .update(cx, |project, cx| {
6957 let third = worktree_c.read(cx);
6958 let second = worktree_b.read(cx);
6959 project.move_worktree(third.id(), second.id(), cx)
6960 })
6961 .expect("moving third before second");
6962
6963 // check the state after moving
6964 project.update(cx, |project, cx| {
6965 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6966 assert_eq!(worktrees.len(), 3);
6967
6968 let first = worktrees[0].read(cx);
6969 let second = worktrees[1].read(cx);
6970 let third = worktrees[2].read(cx);
6971
6972 // check they are now in the right order
6973 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6974 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6975 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6976 });
6977
6978 // move the first worktree to after the third
6979 // [a, b, c] -> [b, c, a]
6980 project
6981 .update(cx, |project, cx| {
6982 let first = worktree_a.read(cx);
6983 let third = worktree_c.read(cx);
6984 project.move_worktree(first.id(), third.id(), cx)
6985 })
6986 .expect("moving first after third");
6987
6988 // check the state after moving
6989 project.update(cx, |project, cx| {
6990 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6991 assert_eq!(worktrees.len(), 3);
6992
6993 let first = worktrees[0].read(cx);
6994 let second = worktrees[1].read(cx);
6995 let third = worktrees[2].read(cx);
6996
6997 // check they are now in the right order
6998 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6999 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7000 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7001 });
7002
7003 // move the third worktree to before the first
7004 // [b, c, a] -> [a, b, c]
7005 project
7006 .update(cx, |project, cx| {
7007 let third = worktree_a.read(cx);
7008 let first = worktree_b.read(cx);
7009 project.move_worktree(third.id(), first.id(), cx)
7010 })
7011 .expect("moving third before first");
7012
7013 // check the state after moving
7014 project.update(cx, |project, cx| {
7015 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7016 assert_eq!(worktrees.len(), 3);
7017
7018 let first = worktrees[0].read(cx);
7019 let second = worktrees[1].read(cx);
7020 let third = worktrees[2].read(cx);
7021
7022 // check they are now in the right order
7023 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7024 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7025 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7026 });
7027}
7028
// Verifies that an unstaged diff (working copy vs. index) is computed
// correctly when the diff is opened, and recomputed when the index contents
// change on disk.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The version of the file recorded in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The version of the file in the working copy.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the initial diff computation settle before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Each expected hunk is (buffer row range, index text, buffer text, status).
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so it matches the buffer except for one deleted line.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // The diff should be recomputed against the new index contents.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7120
// Verifies that an uncommitted diff (working copy vs. HEAD, with staging
// state derived from the index) tracks changes to HEAD and to the index,
// including for a file that has been deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three distinct versions of the file: committed (HEAD), staged (index),
    // and the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, so it is a
    // working-copy deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD contents) should pick up the registered
    // language for syntax highlighting.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // Each expected hunk is (buffer row range, HEAD text, buffer text, status).
        // The added comment line is also absent from the index, hence the
        // secondary (unstaged) hunk; the println change is already staged.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; the deletion is not yet
    // staged, since the file is still present in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7300
// Exercises staging of individual hunks via `stage_or_unstage_hunks`:
// optimistic ("pending") hunk states, the events emitted while the index
// write is in flight, rollback when the index write fails, and staging
// multiple hunks with separate operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero"
    // and modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so we can assert on what is emitted
    // at each stage of the index write.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the hunk is reported as
        // pending removal of its secondary (unstaged) hunk.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The optimistic pending state is shown even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7640
// Verifies that staging multiple hunks remains consistent when filesystem
// events for earlier index writes are delivered late, interleaved with
// further staging operations. The pinned seeds reproduce orderings that
// previously exposed races.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: a deletion and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With the FS event withheld, the hunk stays in the pending state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7834
// Randomized test: repeatedly stages/unstages random hunks with random
// yields in between, then checks that every hunk's final staged state
// matches the last requested operation. Optionally deprioritizes diff
// recalculation to provoke races between recalculation and index writes.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected per-hunk state below.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage it if it currently has a secondary
        // (unstaged) hunk, otherwise unstage it. Record the expected pending
        // state in our local model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave executor progress to vary operation timing.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    // The real diff must agree with our model, hunk by hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7957
7958#[gpui::test]
7959async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7960 init_test(cx);
7961
7962 let committed_contents = r#"
7963 fn main() {
7964 println!("hello from HEAD");
7965 }
7966 "#
7967 .unindent();
7968 let file_contents = r#"
7969 fn main() {
7970 println!("hello from the working copy");
7971 }
7972 "#
7973 .unindent();
7974
7975 let fs = FakeFs::new(cx.background_executor.clone());
7976 fs.insert_tree(
7977 "/dir",
7978 json!({
7979 ".git": {},
7980 "src": {
7981 "main.rs": file_contents,
7982 }
7983 }),
7984 )
7985 .await;
7986
7987 fs.set_head_for_repo(
7988 Path::new("/dir/.git"),
7989 &[("src/main.rs", committed_contents.clone())],
7990 "deadbeef",
7991 );
7992 fs.set_index_for_repo(
7993 Path::new("/dir/.git"),
7994 &[("src/main.rs", committed_contents.clone())],
7995 );
7996
7997 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7998
7999 let buffer = project
8000 .update(cx, |project, cx| {
8001 project.open_local_buffer("/dir/src/main.rs", cx)
8002 })
8003 .await
8004 .unwrap();
8005 let uncommitted_diff = project
8006 .update(cx, |project, cx| {
8007 project.open_uncommitted_diff(buffer.clone(), cx)
8008 })
8009 .await
8010 .unwrap();
8011
8012 cx.run_until_parked();
8013 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8014 let snapshot = buffer.read(cx).snapshot();
8015 assert_hunks(
8016 uncommitted_diff.hunks(&snapshot, cx),
8017 &snapshot,
8018 &uncommitted_diff.base_text_string().unwrap(),
8019 &[(
8020 1..2,
8021 " println!(\"hello from HEAD\");\n",
8022 " println!(\"hello from the working copy\");\n",
8023 DiffHunkStatus {
8024 kind: DiffHunkStatusKind::Modified,
8025 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8026 },
8027 )],
8028 );
8029 });
8030}
8031
// Verifies that `GitStore::repository_and_path_for_project_path` resolves a
// project path to the innermost containing repository (including nested
// repositories), returns `None` for paths outside any repository, and stops
// resolving once a repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repository that itself contains a nested repository at
    // `dir1/deps/dep1`; `c.txt` is outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for repository discovery to finish before querying.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair maps a worktree-relative path to the expected
        // (repository work directory, repo-relative path), or `None` when
        // the path belongs to no repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                // Resolves to the nested repo, not the outer one.
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` directory should make its files
    // no longer resolve to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8121
8122#[gpui::test]
8123async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8124 init_test(cx);
8125 let fs = FakeFs::new(cx.background_executor.clone());
8126 let home = paths::home_dir();
8127 fs.insert_tree(
8128 home,
8129 json!({
8130 ".git": {},
8131 "project": {
8132 "a.txt": "A"
8133 },
8134 }),
8135 )
8136 .await;
8137
8138 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8139 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8140 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8141
8142 project
8143 .update(cx, |project, cx| project.git_scans_complete(cx))
8144 .await;
8145 tree.flush_fs_events(cx).await;
8146
8147 project.read_with(cx, |project, cx| {
8148 let containing = project
8149 .git_store()
8150 .read(cx)
8151 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8152 assert!(containing.is_none());
8153 });
8154
8155 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8156 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8157 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8158 project
8159 .update(cx, |project, cx| project.git_scans_complete(cx))
8160 .await;
8161 tree.flush_fs_events(cx).await;
8162
8163 project.read_with(cx, |project, cx| {
8164 let containing = project
8165 .git_store()
8166 .read(cx)
8167 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8168 assert_eq!(
8169 containing
8170 .unwrap()
8171 .0
8172 .read(cx)
8173 .work_directory_abs_path
8174 .as_ref(),
8175 home,
8176 );
8177 });
8178}
8179
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses a real git repository on disk (TempTree + RealFs), so
    // the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a worktree deletion (d.txt) and a worktree modification (a.txt).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion so they no longer show up.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8309
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk, so the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository; the nested `sub` repo is also discovered.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8372
8373#[track_caller]
8374/// We merge lhs into rhs.
8375fn merge_pending_ops_snapshots(
8376 source: Vec<pending_op::PendingOps>,
8377 mut target: Vec<pending_op::PendingOps>,
8378) -> Vec<pending_op::PendingOps> {
8379 for s_ops in source {
8380 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8381 if ops.repo_path == s_ops.repo_path {
8382 Some(idx)
8383 } else {
8384 None
8385 }
8386 }) {
8387 let t_ops = &mut target[idx];
8388 for s_op in s_ops.ops {
8389 if let Some(op_idx) = t_ops
8390 .ops
8391 .iter()
8392 .zip(0..)
8393 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8394 {
8395 let t_op = &mut t_ops.ops[op_idx];
8396 match (s_op.job_status, t_op.job_status) {
8397 (pending_op::JobStatus::Running, _) => {}
8398 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8399 (s_st, t_st) if s_st == t_st => {}
8400 _ => unreachable!(),
8401 }
8402 } else {
8403 t_ops.ops.push(s_op);
8404 }
8405 }
8406 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8407 } else {
8408 target.push(s_ops);
8409 }
8410 }
8411 target
8412}
8413
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // across events, so the complete op history can be inspected at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops_by_path.is_empty());
    });

    // Op ids are expected to be handed out sequentially, starting at 1.
    let mut id = 1u16;

    // Stages or unstages `path`, asserting the op is observable as `Running`
    // while the task is in flight and `Finished` once it completes. Each call
    // consumes the next op id.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // While the task is in flight, the newest op must be Running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After the task resolves, the same op must be marked Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging the same file five times.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated history shows all five ops, alternating
    // Staged/Unstaged, each reaching Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").0), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last op: the file has been added
    // to the index and is unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8574
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // across events, so the complete op history can be inspected at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Kick off a staging task and detach it, leaving it in flight.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // A second staging request for the same path supersedes the detached one;
    // await its completion (bounded by a timeout so a hang fails the test).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first op is recorded as Skipped (superseded), the second Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").0), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The cached status shows the file staged: added to the index and
    // unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8680
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // across events, so the complete op history can be inspected at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a single entry, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 is the single-entry stage (stage_all adds no new op for an
    // already-staged file); op 2 is the unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").0), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 is the stage_all, op 2 the unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").0), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to Untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
8809
8810#[gpui::test]
8811async fn test_repository_subfolder_git_status(
8812 executor: gpui::BackgroundExecutor,
8813 cx: &mut gpui::TestAppContext,
8814) {
8815 init_test(cx);
8816
8817 let fs = FakeFs::new(executor);
8818 fs.insert_tree(
8819 path!("/root"),
8820 json!({
8821 "my-repo": {
8822 ".git": {},
8823 "a.txt": "a",
8824 "sub-folder-1": {
8825 "sub-folder-2": {
8826 "c.txt": "cc",
8827 "d": {
8828 "e.txt": "eee"
8829 }
8830 },
8831 }
8832 },
8833 }),
8834 )
8835 .await;
8836
8837 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8838 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8839
8840 fs.set_status_for_repo(
8841 path!("/root/my-repo/.git").as_ref(),
8842 &[(E_TXT, FileStatus::Untracked)],
8843 );
8844
8845 let project = Project::test(
8846 fs.clone(),
8847 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8848 cx,
8849 )
8850 .await;
8851
8852 project
8853 .update(cx, |project, cx| project.git_scans_complete(cx))
8854 .await;
8855 cx.run_until_parked();
8856
8857 let repository = project.read_with(cx, |project, cx| {
8858 project.repositories(cx).values().next().unwrap().clone()
8859 });
8860
8861 // Ensure that the git status is loaded correctly
8862 repository.read_with(cx, |repository, _cx| {
8863 assert_eq!(
8864 repository.work_directory_abs_path,
8865 Path::new(path!("/root/my-repo")).into()
8866 );
8867
8868 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8869 assert_eq!(
8870 repository
8871 .status_for_path(&repo_path(E_TXT))
8872 .unwrap()
8873 .status,
8874 FileStatus::Untracked
8875 );
8876 });
8877
8878 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8879 project
8880 .update(cx, |project, cx| project.git_scans_complete(cx))
8881 .await;
8882 cx.run_until_parked();
8883
8884 repository.read_with(cx, |repository, _cx| {
8885 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8886 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8887 });
8888}
8889
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk, so the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on another branch that conflicts with main: both
    // branches rewrite a.txt with different contents.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    // Cherry-picking the other branch's commit produces a conflict.
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick should clear the conflict set.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8972
8973#[gpui::test]
8974async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
8975 init_test(cx);
8976 let fs = FakeFs::new(cx.background_executor.clone());
8977 fs.insert_tree(
8978 path!("/root"),
8979 json!({
8980 ".git": {},
8981 ".gitignore": "*.txt\n",
8982 "a.xml": "<a></a>",
8983 "b.txt": "Some text"
8984 }),
8985 )
8986 .await;
8987
8988 fs.set_head_and_index_for_repo(
8989 path!("/root/.git").as_ref(),
8990 &[
8991 (".gitignore", "*.txt\n".into()),
8992 ("a.xml", "<a></a>".into()),
8993 ],
8994 );
8995
8996 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8997
8998 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8999 tree.flush_fs_events(cx).await;
9000 project
9001 .update(cx, |project, cx| project.git_scans_complete(cx))
9002 .await;
9003 cx.executor().run_until_parked();
9004
9005 let repository = project.read_with(cx, |project, cx| {
9006 project.repositories(cx).values().next().unwrap().clone()
9007 });
9008
9009 // One file is unmodified, the other is ignored.
9010 cx.read(|cx| {
9011 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
9012 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
9013 });
9014
9015 // Change the gitignore, and stage the newly non-ignored file.
9016 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
9017 .await
9018 .unwrap();
9019 fs.set_index_for_repo(
9020 Path::new(path!("/root/.git")),
9021 &[
9022 (".gitignore", "*.txt\n".into()),
9023 ("a.xml", "<a></a>".into()),
9024 ("b.txt", "Some text".into()),
9025 ],
9026 );
9027
9028 cx.executor().run_until_parked();
9029 cx.read(|cx| {
9030 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
9031 assert_entry_git_state(
9032 tree.read(cx),
9033 repository.read(cx),
9034 "b.txt",
9035 Some(StatusCode::Added),
9036 false,
9037 );
9038 });
9039}
9040
9041// NOTE:
9042// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
9043// a directory which some program has already open.
9044// This is a limitation of the Windows.
9045// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9046// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk (TempTree + RealFs) requires blocking.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed and then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work directory is `project1`; `a` modified, `b` untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The same repository entity now points at the renamed directory, and
    // the file statuses survive the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9122
9123// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
9124// you can't rename a directory which some program has already open. This is a
9125// limitation of the Windows. See:
9126// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9127// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
9128#[gpui::test]
9129#[cfg_attr(target_os = "windows", ignore)]
9130async fn test_file_status(cx: &mut gpui::TestAppContext) {
9131 init_test(cx);
9132 cx.executor().allow_parking();
9133 const IGNORE_RULE: &str = "**/target";
9134
9135 let root = TempTree::new(json!({
9136 "project": {
9137 "a.txt": "a",
9138 "b.txt": "bb",
9139 "c": {
9140 "d": {
9141 "e.txt": "eee"
9142 }
9143 },
9144 "f.txt": "ffff",
9145 "target": {
9146 "build_file": "???"
9147 },
9148 ".gitignore": IGNORE_RULE
9149 },
9150
9151 }));
9152 let root_path = root.path();
9153
9154 const A_TXT: &str = "a.txt";
9155 const B_TXT: &str = "b.txt";
9156 const E_TXT: &str = "c/d/e.txt";
9157 const F_TXT: &str = "f.txt";
9158 const DOTGITIGNORE: &str = ".gitignore";
9159 const BUILD_FILE: &str = "target/build_file";
9160
9161 // Set up git repository before creating the worktree.
9162 let work_dir = root.path().join("project");
9163 let mut repo = git_init(work_dir.as_path());
9164 repo.add_ignore_rule(IGNORE_RULE).unwrap();
9165 git_add(A_TXT, &repo);
9166 git_add(E_TXT, &repo);
9167 git_add(DOTGITIGNORE, &repo);
9168 git_commit("Initial commit", &repo);
9169
9170 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
9171
9172 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9173 tree.flush_fs_events(cx).await;
9174 project
9175 .update(cx, |project, cx| project.git_scans_complete(cx))
9176 .await;
9177 cx.executor().run_until_parked();
9178
9179 let repository = project.read_with(cx, |project, cx| {
9180 project.repositories(cx).values().next().unwrap().clone()
9181 });
9182
9183 // Check that the right git state is observed on startup
9184 repository.read_with(cx, |repository, _cx| {
9185 assert_eq!(
9186 repository.work_directory_abs_path.as_ref(),
9187 root_path.join("project").as_path()
9188 );
9189
9190 assert_eq!(
9191 repository
9192 .status_for_path(&repo_path(B_TXT))
9193 .unwrap()
9194 .status,
9195 FileStatus::Untracked,
9196 );
9197 assert_eq!(
9198 repository
9199 .status_for_path(&repo_path(F_TXT))
9200 .unwrap()
9201 .status,
9202 FileStatus::Untracked,
9203 );
9204 });
9205
9206 // Modify a file in the working copy.
9207 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
9208 tree.flush_fs_events(cx).await;
9209 project
9210 .update(cx, |project, cx| project.git_scans_complete(cx))
9211 .await;
9212 cx.executor().run_until_parked();
9213
9214 // The worktree detects that the file's git status has changed.
9215 repository.read_with(cx, |repository, _| {
9216 assert_eq!(
9217 repository
9218 .status_for_path(&repo_path(A_TXT))
9219 .unwrap()
9220 .status,
9221 StatusCode::Modified.worktree(),
9222 );
9223 });
9224
9225 // Create a commit in the git repository.
9226 git_add(A_TXT, &repo);
9227 git_add(B_TXT, &repo);
9228 git_commit("Committing modified and added", &repo);
9229 tree.flush_fs_events(cx).await;
9230 project
9231 .update(cx, |project, cx| project.git_scans_complete(cx))
9232 .await;
9233 cx.executor().run_until_parked();
9234
9235 // The worktree detects that the files' git status have changed.
9236 repository.read_with(cx, |repository, _cx| {
9237 assert_eq!(
9238 repository
9239 .status_for_path(&repo_path(F_TXT))
9240 .unwrap()
9241 .status,
9242 FileStatus::Untracked,
9243 );
9244 assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
9245 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
9246 });
9247
9248 // Modify files in the working copy and perform git operations on other files.
9249 git_reset(0, &repo);
9250 git_remove_index(Path::new(B_TXT), &repo);
9251 git_stash(&mut repo);
9252 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
9253 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
9254 tree.flush_fs_events(cx).await;
9255 project
9256 .update(cx, |project, cx| project.git_scans_complete(cx))
9257 .await;
9258 cx.executor().run_until_parked();
9259
9260 // Check that more complex repo changes are tracked
9261 repository.read_with(cx, |repository, _cx| {
9262 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
9263 assert_eq!(
9264 repository
9265 .status_for_path(&repo_path(B_TXT))
9266 .unwrap()
9267 .status,
9268 FileStatus::Untracked,
9269 );
9270 assert_eq!(
9271 repository
9272 .status_for_path(&repo_path(E_TXT))
9273 .unwrap()
9274 .status,
9275 StatusCode::Modified.worktree(),
9276 );
9277 });
9278
9279 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
9280 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
9281 std::fs::write(
9282 work_dir.join(DOTGITIGNORE),
9283 [IGNORE_RULE, "f.txt"].join("\n"),
9284 )
9285 .unwrap();
9286
9287 git_add(Path::new(DOTGITIGNORE), &repo);
9288 git_commit("Committing modified git ignore", &repo);
9289
9290 tree.flush_fs_events(cx).await;
9291 cx.executor().run_until_parked();
9292
9293 let mut renamed_dir_name = "first_directory/second_directory";
9294 const RENAMED_FILE: &str = "rf.txt";
9295
9296 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
9297 std::fs::write(
9298 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
9299 "new-contents",
9300 )
9301 .unwrap();
9302
9303 tree.flush_fs_events(cx).await;
9304 project
9305 .update(cx, |project, cx| project.git_scans_complete(cx))
9306 .await;
9307 cx.executor().run_until_parked();
9308
9309 repository.read_with(cx, |repository, _cx| {
9310 assert_eq!(
9311 repository
9312 .status_for_path(
9313 &rel_path(renamed_dir_name)
9314 .join(rel_path(RENAMED_FILE))
9315 .into()
9316 )
9317 .unwrap()
9318 .status,
9319 FileStatus::Untracked,
9320 );
9321 });
9322
9323 renamed_dir_name = "new_first_directory/second_directory";
9324
9325 std::fs::rename(
9326 work_dir.join("first_directory"),
9327 work_dir.join("new_first_directory"),
9328 )
9329 .unwrap();
9330
9331 tree.flush_fs_events(cx).await;
9332 project
9333 .update(cx, |project, cx| project.git_scans_complete(cx))
9334 .await;
9335 cx.executor().run_until_parked();
9336
9337 repository.read_with(cx, |repository, _cx| {
9338 assert_eq!(
9339 repository
9340 .status_for_path(
9341 &rel_path(renamed_dir_name)
9342 .join(rel_path(RENAMED_FILE))
9343 .into()
9344 )
9345 .unwrap()
9346 .status,
9347 FileStatus::Untracked,
9348 );
9349 });
9350}
9351
9352#[gpui::test]
9353#[ignore]
9354async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
9355 init_test(cx);
9356 cx.executor().allow_parking();
9357
9358 const IGNORE_RULE: &str = "**/target";
9359
9360 let root = TempTree::new(json!({
9361 "project": {
9362 "src": {
9363 "main.rs": "fn main() {}"
9364 },
9365 "target": {
9366 "debug": {
9367 "important_text.txt": "important text",
9368 },
9369 },
9370 ".gitignore": IGNORE_RULE
9371 },
9372
9373 }));
9374 let root_path = root.path();
9375
9376 // Set up git repository before creating the worktree.
9377 let work_dir = root.path().join("project");
9378 let repo = git_init(work_dir.as_path());
9379 repo.add_ignore_rule(IGNORE_RULE).unwrap();
9380 git_add("src/main.rs", &repo);
9381 git_add(".gitignore", &repo);
9382 git_commit("Initial commit", &repo);
9383
9384 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
9385 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9386 let project_events = Arc::new(Mutex::new(Vec::new()));
9387 project.update(cx, |project, cx| {
9388 let repo_events = repository_updates.clone();
9389 cx.subscribe(project.git_store(), move |_, _, e, _| {
9390 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9391 repo_events.lock().push(e.clone());
9392 }
9393 })
9394 .detach();
9395 let project_events = project_events.clone();
9396 cx.subscribe_self(move |_, e, _| {
9397 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9398 project_events.lock().extend(
9399 updates
9400 .iter()
9401 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9402 .filter(|(path, _)| path != "fs-event-sentinel"),
9403 );
9404 }
9405 })
9406 .detach();
9407 });
9408
9409 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9410 tree.flush_fs_events(cx).await;
9411 tree.update(cx, |tree, cx| {
9412 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
9413 })
9414 .await
9415 .unwrap();
9416 tree.update(cx, |tree, _| {
9417 assert_eq!(
9418 tree.entries(true, 0)
9419 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9420 .collect::<Vec<_>>(),
9421 vec![
9422 (rel_path(""), false),
9423 (rel_path("project/"), false),
9424 (rel_path("project/.gitignore"), false),
9425 (rel_path("project/src"), false),
9426 (rel_path("project/src/main.rs"), false),
9427 (rel_path("project/target"), true),
9428 (rel_path("project/target/debug"), true),
9429 (rel_path("project/target/debug/important_text.txt"), true),
9430 ]
9431 );
9432 });
9433
9434 assert_eq!(
9435 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9436 vec![
9437 RepositoryEvent::StatusesChanged { full_scan: true },
9438 RepositoryEvent::MergeHeadsChanged,
9439 ],
9440 "Initial worktree scan should produce a repo update event"
9441 );
9442 assert_eq!(
9443 project_events.lock().drain(..).collect::<Vec<_>>(),
9444 vec![
9445 ("project/target".to_string(), PathChange::Loaded),
9446 ("project/target/debug".to_string(), PathChange::Loaded),
9447 (
9448 "project/target/debug/important_text.txt".to_string(),
9449 PathChange::Loaded
9450 ),
9451 ],
9452 "Initial project changes should show that all not-ignored and all opened files are loaded"
9453 );
9454
9455 let deps_dir = work_dir.join("target").join("debug").join("deps");
9456 std::fs::create_dir_all(&deps_dir).unwrap();
9457 tree.flush_fs_events(cx).await;
9458 project
9459 .update(cx, |project, cx| project.git_scans_complete(cx))
9460 .await;
9461 cx.executor().run_until_parked();
9462 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9463 tree.flush_fs_events(cx).await;
9464 project
9465 .update(cx, |project, cx| project.git_scans_complete(cx))
9466 .await;
9467 cx.executor().run_until_parked();
9468 std::fs::remove_dir_all(&deps_dir).unwrap();
9469 tree.flush_fs_events(cx).await;
9470 project
9471 .update(cx, |project, cx| project.git_scans_complete(cx))
9472 .await;
9473 cx.executor().run_until_parked();
9474
9475 tree.update(cx, |tree, _| {
9476 assert_eq!(
9477 tree.entries(true, 0)
9478 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9479 .collect::<Vec<_>>(),
9480 vec![
9481 (rel_path(""), false),
9482 (rel_path("project/"), false),
9483 (rel_path("project/.gitignore"), false),
9484 (rel_path("project/src"), false),
9485 (rel_path("project/src/main.rs"), false),
9486 (rel_path("project/target"), true),
9487 (rel_path("project/target/debug"), true),
9488 (rel_path("project/target/debug/important_text.txt"), true),
9489 ],
9490 "No stray temp files should be left after the flycheck changes"
9491 );
9492 });
9493
9494 assert_eq!(
9495 repository_updates
9496 .lock()
9497 .iter()
9498 .cloned()
9499 .collect::<Vec<_>>(),
9500 Vec::new(),
9501 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9502 );
9503 assert_eq!(
9504 project_events.lock().as_slice(),
9505 vec![
9506 ("project/target/debug/deps".to_string(), PathChange::Added),
9507 ("project/target/debug/deps".to_string(), PathChange::Removed),
9508 ],
9509 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9510 No updates for more nested directories should happen as those are ignored",
9511 );
9512}
9513
9514#[gpui::test]
9515async fn test_odd_events_for_ignored_dirs(
9516 executor: BackgroundExecutor,
9517 cx: &mut gpui::TestAppContext,
9518) {
9519 init_test(cx);
9520 let fs = FakeFs::new(executor);
9521 fs.insert_tree(
9522 path!("/root"),
9523 json!({
9524 ".git": {},
9525 ".gitignore": "**/target/",
9526 "src": {
9527 "main.rs": "fn main() {}",
9528 },
9529 "target": {
9530 "debug": {
9531 "foo.txt": "foo",
9532 "deps": {}
9533 }
9534 }
9535 }),
9536 )
9537 .await;
9538 fs.set_head_and_index_for_repo(
9539 path!("/root/.git").as_ref(),
9540 &[
9541 (".gitignore", "**/target/".into()),
9542 ("src/main.rs", "fn main() {}".into()),
9543 ],
9544 );
9545
9546 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9547 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9548 let project_events = Arc::new(Mutex::new(Vec::new()));
9549 project.update(cx, |project, cx| {
9550 let repository_updates = repository_updates.clone();
9551 cx.subscribe(project.git_store(), move |_, _, e, _| {
9552 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9553 repository_updates.lock().push(e.clone());
9554 }
9555 })
9556 .detach();
9557 let project_events = project_events.clone();
9558 cx.subscribe_self(move |_, e, _| {
9559 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9560 project_events.lock().extend(
9561 updates
9562 .iter()
9563 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9564 .filter(|(path, _)| path != "fs-event-sentinel"),
9565 );
9566 }
9567 })
9568 .detach();
9569 });
9570
9571 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9572 tree.update(cx, |tree, cx| {
9573 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9574 })
9575 .await
9576 .unwrap();
9577 tree.flush_fs_events(cx).await;
9578 project
9579 .update(cx, |project, cx| project.git_scans_complete(cx))
9580 .await;
9581 cx.run_until_parked();
9582 tree.update(cx, |tree, _| {
9583 assert_eq!(
9584 tree.entries(true, 0)
9585 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9586 .collect::<Vec<_>>(),
9587 vec![
9588 (rel_path(""), false),
9589 (rel_path(".gitignore"), false),
9590 (rel_path("src"), false),
9591 (rel_path("src/main.rs"), false),
9592 (rel_path("target"), true),
9593 (rel_path("target/debug"), true),
9594 (rel_path("target/debug/deps"), true),
9595 (rel_path("target/debug/foo.txt"), true),
9596 ]
9597 );
9598 });
9599
9600 assert_eq!(
9601 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9602 vec![
9603 RepositoryEvent::MergeHeadsChanged,
9604 RepositoryEvent::BranchChanged,
9605 RepositoryEvent::StatusesChanged { full_scan: false },
9606 RepositoryEvent::StatusesChanged { full_scan: false },
9607 ],
9608 "Initial worktree scan should produce a repo update event"
9609 );
9610 assert_eq!(
9611 project_events.lock().drain(..).collect::<Vec<_>>(),
9612 vec![
9613 ("target".to_string(), PathChange::Loaded),
9614 ("target/debug".to_string(), PathChange::Loaded),
9615 ("target/debug/deps".to_string(), PathChange::Loaded),
9616 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9617 ],
9618 "All non-ignored entries and all opened firs should be getting a project event",
9619 );
9620
9621 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9622 // This may happen multiple times during a single flycheck, but once is enough for testing.
9623 fs.emit_fs_event("/root/target/debug/deps", None);
9624 tree.flush_fs_events(cx).await;
9625 project
9626 .update(cx, |project, cx| project.git_scans_complete(cx))
9627 .await;
9628 cx.executor().run_until_parked();
9629
9630 assert_eq!(
9631 repository_updates
9632 .lock()
9633 .iter()
9634 .cloned()
9635 .collect::<Vec<_>>(),
9636 Vec::new(),
9637 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9638 );
9639 assert_eq!(
9640 project_events.lock().as_slice(),
9641 Vec::new(),
9642 "No further project events should happen, as only ignored dirs received FS events",
9643 );
9644}
9645
#[gpui::test]
// Repositories should only be discovered for visible worktrees: adding an
// invisible (single-file) worktree that lives inside an outer git repository
// must not add that outer repository to the project.
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` is an outer repo; `dir1/dep1` is a nested repo that will be the
    // project's only visible worktree root.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo rooted at the visible worktree should be present.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file that belongs to the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged — the outer repo stays invisible.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9707
#[gpui::test(iterations = 10)]
// Exercises git status vs. gitignore interaction across rescans: tracked files,
// files ignored by an ancestor .gitignore, and files inside an ignored dir must
// each report the expected (status, is_ignored) pair, both on the initial scan
// and after new files are created and the index is updated.
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so the `.git` dir itself shows up as an entry.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // An ancestor .gitignore (outside the repo) ignores `ancestor-ignored-*`;
    // the repo's own .gitignore ignores `ignored-dir`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored dirs aren't scanned by default; refresh explicitly so the
    // entries inside `ignored-dir` exist for the assertions below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: no statuses, ignored flags as configured.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage tracked-file2 in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // Only the staged file gains a status; ignored files stay status-less.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9848
#[gpui::test]
// Verifies discovery and refresh of repositories backed by a `.git` *file*
// (gitdir pointer): a linked git worktree and a submodule, alongside the main
// repository. Both should be detected, and git events in their shared git dirs
// should refresh their statuses.
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index contain "b", the file on disk contains "B" —
            // so src/b.txt should read as modified in the worktree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must map to the linked-worktree repo, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures all pending repo jobs finish before asserting status.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10004
#[gpui::test]
// Two worktree roots living inside the same git repository must resolve to a
// single deduplicated repository entry.
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the same repo as separate worktree roots.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent work directory.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10051
10052async fn search(
10053 project: &Entity<Project>,
10054 query: SearchQuery,
10055 cx: &mut gpui::TestAppContext,
10056) -> Result<HashMap<String, Vec<Range<usize>>>> {
10057 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10058 let mut results = HashMap::default();
10059 while let Ok(search_result) = search_rx.recv().await {
10060 match search_result {
10061 SearchResult::Buffer { buffer, ranges } => {
10062 results.entry(buffer).or_insert(ranges);
10063 }
10064 SearchResult::LimitReached => {}
10065 }
10066 }
10067 Ok(results
10068 .into_iter()
10069 .map(|(buffer, ranges)| {
10070 buffer.update(cx, |buffer, cx| {
10071 let path = buffer
10072 .file()
10073 .unwrap()
10074 .full_path(cx)
10075 .to_string_lossy()
10076 .to_string();
10077 let ranges = ranges
10078 .into_iter()
10079 .map(|range| range.to_offset(buffer))
10080 .collect::<Vec<_>>();
10081 (path, ranges)
10082 })
10083 })
10084 .collect())
10085}
10086
/// Shared setup for every test in this module: initializes test logging,
/// installs a test `SettingsStore` global, and initializes the release
/// channel with a default version.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
    });
}
10096
10097fn json_lang() -> Arc<Language> {
10098 Arc::new(Language::new(
10099 LanguageConfig {
10100 name: "JSON".into(),
10101 matcher: LanguageMatcher {
10102 path_suffixes: vec!["json".to_string()],
10103 ..Default::default()
10104 },
10105 ..Default::default()
10106 },
10107 None,
10108 ))
10109}
10110
10111fn js_lang() -> Arc<Language> {
10112 Arc::new(Language::new(
10113 LanguageConfig {
10114 name: "JavaScript".into(),
10115 matcher: LanguageMatcher {
10116 path_suffixes: vec!["js".to_string()],
10117 ..Default::default()
10118 },
10119 ..Default::default()
10120 },
10121 None,
10122 ))
10123}
10124
10125fn rust_lang() -> Arc<Language> {
10126 Arc::new(Language::new(
10127 LanguageConfig {
10128 name: "Rust".into(),
10129 matcher: LanguageMatcher {
10130 path_suffixes: vec!["rs".to_string()],
10131 ..Default::default()
10132 },
10133 ..Default::default()
10134 },
10135 Some(tree_sitter_rust::LANGUAGE.into()),
10136 ))
10137}
10138
/// A Python language definition (no grammar) with a fake toolchain lister that
/// reports a `.venv` directory found in any ancestor of the queried subroot.
/// Used to test toolchain discovery without a real Python installation.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Fake lister: scans ancestors on the provided FakeFs for `.venv` dirs.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in this fake; tests only exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands needed for the fake toolchain.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10208
10209fn typescript_lang() -> Arc<Language> {
10210 Arc::new(Language::new(
10211 LanguageConfig {
10212 name: "TypeScript".into(),
10213 matcher: LanguageMatcher {
10214 path_suffixes: vec!["ts".to_string()],
10215 ..Default::default()
10216 },
10217 ..Default::default()
10218 },
10219 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10220 ))
10221}
10222
10223fn tsx_lang() -> Arc<Language> {
10224 Arc::new(Language::new(
10225 LanguageConfig {
10226 name: "tsx".into(),
10227 matcher: LanguageMatcher {
10228 path_suffixes: vec!["tsx".to_string()],
10229 ..Default::default()
10230 },
10231 ..Default::default()
10232 },
10233 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10234 ))
10235}
10236
10237fn get_all_tasks(
10238 project: &Entity<Project>,
10239 task_contexts: Arc<TaskContexts>,
10240 cx: &mut App,
10241) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10242 let new_tasks = project.update(cx, |project, cx| {
10243 project.task_store.update(cx, |task_store, cx| {
10244 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10245 this.used_and_current_resolved_tasks(task_contexts, cx)
10246 })
10247 })
10248 });
10249
10250 cx.background_spawn(async move {
10251 let (mut old, new) = new_tasks.await;
10252 old.extend(new);
10253 old
10254 })
10255}
10256
10257#[track_caller]
10258fn assert_entry_git_state(
10259 tree: &Worktree,
10260 repository: &Repository,
10261 path: &str,
10262 index_status: Option<StatusCode>,
10263 is_ignored: bool,
10264) {
10265 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10266 let entry = tree
10267 .entry_for_path(&rel_path(path))
10268 .unwrap_or_else(|| panic!("entry {path} not found"));
10269 let status = repository
10270 .status_for_path(&repo_path(path))
10271 .map(|entry| entry.status);
10272 let expected = index_status.map(|index_status| {
10273 TrackedStatus {
10274 index_status,
10275 worktree_status: StatusCode::Unmodified,
10276 }
10277 .into()
10278 });
10279 assert_eq!(
10280 status, expected,
10281 "expected {path} to have git status: {expected:?}"
10282 );
10283 assert_eq!(
10284 entry.is_ignored, is_ignored,
10285 "expected {path} to have is_ignored: {is_ignored}"
10286 );
10287}
10288
10289#[track_caller]
10290fn git_init(path: &Path) -> git2::Repository {
10291 let mut init_opts = RepositoryInitOptions::new();
10292 init_opts.initial_head("main");
10293 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10294}
10295
10296#[track_caller]
10297fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10298 let path = path.as_ref();
10299 let mut index = repo.index().expect("Failed to get index");
10300 index.add_path(path).expect("Failed to add file");
10301 index.write().expect("Failed to write index");
10302}
10303
10304#[track_caller]
10305fn git_remove_index(path: &Path, repo: &git2::Repository) {
10306 let mut index = repo.index().expect("Failed to get index");
10307 index.remove_path(path).expect("Failed to add file");
10308 index.write().expect("Failed to write index");
10309}
10310
10311#[track_caller]
10312fn git_commit(msg: &'static str, repo: &git2::Repository) {
10313 use git2::Signature;
10314
10315 let signature = Signature::now("test", "test@zed.dev").unwrap();
10316 let oid = repo.index().unwrap().write_tree().unwrap();
10317 let tree = repo.find_tree(oid).unwrap();
10318 if let Ok(head) = repo.head() {
10319 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10320
10321 let parent_commit = parent_obj.as_commit().unwrap();
10322
10323 repo.commit(
10324 Some("HEAD"),
10325 &signature,
10326 &signature,
10327 msg,
10328 &tree,
10329 &[parent_commit],
10330 )
10331 .expect("Failed to commit with parent");
10332 } else {
10333 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10334 .expect("Failed to commit");
10335 }
10336}
10337
// Cherry-picks `commit` onto the current HEAD.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out; it is
// kept for tests that may be re-enabled later.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10343
10344#[track_caller]
10345fn git_stash(repo: &mut git2::Repository) {
10346 use git2::Signature;
10347
10348 let signature = Signature::now("test", "test@zed.dev").unwrap();
10349 repo.stash_save(&signature, "N/A", None)
10350 .expect("Failed to stash");
10351}
10352
10353#[track_caller]
10354fn git_reset(offset: usize, repo: &git2::Repository) {
10355 let head = repo.head().expect("Couldn't get repo head");
10356 let object = head.peel(git2::ObjectType::Commit).unwrap();
10357 let commit = object.as_commit().unwrap();
10358 let new_head = commit
10359 .parents()
10360 .inspect(|parnet| {
10361 parnet.message();
10362 })
10363 .nth(offset)
10364 .expect("Not enough history");
10365 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10366 .expect("Could not reset");
10367}
10368
/// Creates a new branch `name` pointing at the current HEAD commit.
/// Compiled out via `#[cfg(any())]` — kept for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fix: the expect message previously said "Failed to commit" — a
    // copy-paste that would mislabel a branch-creation failure.
    // `false` = do not force-overwrite an existing branch of the same name.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10379
/// Checks out the ref named `name`: repoints HEAD first, then syncs the
/// working tree to match it (the two steps must happen in this order).
/// Compiled out via `#[cfg(any())]` — kept for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10386
/// Snapshots the repository's current status as a map from file path to its
/// `git2::Status` flags. Compiled out via `#[cfg(any())]`.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
10396
10397#[gpui::test]
10398async fn test_find_project_path_abs(
10399 background_executor: BackgroundExecutor,
10400 cx: &mut gpui::TestAppContext,
10401) {
10402 // find_project_path should work with absolute paths
10403 init_test(cx);
10404
10405 let fs = FakeFs::new(background_executor);
10406 fs.insert_tree(
10407 path!("/root"),
10408 json!({
10409 "project1": {
10410 "file1.txt": "content1",
10411 "subdir": {
10412 "file2.txt": "content2"
10413 }
10414 },
10415 "project2": {
10416 "file3.txt": "content3"
10417 }
10418 }),
10419 )
10420 .await;
10421
10422 let project = Project::test(
10423 fs.clone(),
10424 [
10425 path!("/root/project1").as_ref(),
10426 path!("/root/project2").as_ref(),
10427 ],
10428 cx,
10429 )
10430 .await;
10431
10432 // Make sure the worktrees are fully initialized
10433 project
10434 .update(cx, |project, cx| project.git_scans_complete(cx))
10435 .await;
10436 cx.run_until_parked();
10437
10438 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
10439 project.read_with(cx, |project, cx| {
10440 let worktrees: Vec<_> = project.worktrees(cx).collect();
10441 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
10442 let id1 = worktrees[0].read(cx).id();
10443 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
10444 let id2 = worktrees[1].read(cx).id();
10445 (abs_path1, id1, abs_path2, id2)
10446 });
10447
10448 project.update(cx, |project, cx| {
10449 let abs_path = project1_abs_path.join("file1.txt");
10450 let found_path = project.find_project_path(abs_path, cx).unwrap();
10451 assert_eq!(found_path.worktree_id, project1_id);
10452 assert_eq!(&*found_path.path, rel_path("file1.txt"));
10453
10454 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
10455 let found_path = project.find_project_path(abs_path, cx).unwrap();
10456 assert_eq!(found_path.worktree_id, project1_id);
10457 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
10458
10459 let abs_path = project2_abs_path.join("file3.txt");
10460 let found_path = project.find_project_path(abs_path, cx).unwrap();
10461 assert_eq!(found_path.worktree_id, project2_id);
10462 assert_eq!(&*found_path.path, rel_path("file3.txt"));
10463
10464 let abs_path = project1_abs_path.join("nonexistent.txt");
10465 let found_path = project.find_project_path(abs_path, cx);
10466 assert!(
10467 found_path.is_some(),
10468 "Should find project path for nonexistent file in worktree"
10469 );
10470
10471 // Test with an absolute path outside any worktree
10472 let abs_path = Path::new("/some/other/path");
10473 let found_path = project.find_project_path(abs_path, cx);
10474 assert!(
10475 found_path.is_none(),
10476 "Should not find project path for path outside any worktree"
10477 );
10478 });
10479}