1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling: per-directory discovery, precedence of
    // .editorconfig over .zed/settings.json, nested .editorconfig overriding
    // the root one, tab_width fallback when indent_size is unset, and
    // fallback to Zed settings when a value is "off" or unparseable.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            // NOTE(review): the trailing comma after "off" below is part of the
            // .editorconfig payload itself — presumably it renders the value
            // invalid and exercises the fallback path; confirm intent.
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp directory into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that git hosting providers declared in a project's
    // .zed/settings.json are registered in the global
    // GitHostingProviderRegistry, and removed again when the setting goes away.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one extra gitlab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Let the settings file be observed and applied.
    cx.executor().run_until_parked();

    // The custom provider should now be listed in the global registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings should unregister the custom provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory .zed/settings.json and .zed/tasks.json handling:
    // nested settings override parent ones, tasks from all .zed directories
    // are surfaced, recently-scheduled tasks sort first, and global
    // file-based tasks are appended after worktree tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    // Two .zed directories: one at the root, one nested under "b/".
    // NOTE(review): the trailing "},]" commas are inside the tasks.json
    // payloads — presumably the task parser accepts trailing commas; confirm.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution is driven by a context naming the active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the root-level ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings: a/ picks up the root tab_size=8; b/ is overridden to 2
            // by its own nested .zed/settings.json.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from both .zed directories are listed.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled, and add a global
    // file-based task (simulating the user-level tasks.json).
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task is
    // appended after the worktree tasks, carrying its env.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // Verifies that a task referencing $ZED_WORKTREE_ROOT only resolves when
    // a worktree context supplying that variable is present, and that the
    // variable is substituted into the resolved command.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With an active item but no active worktree context, there is no value
    // for ZED_WORKTREE_ROOT, so the task cannot resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the worktree context provides WorktreeRoot, the task resolves and
    // "$ZED_WORKTREE_ROOT" is substituted with "/dir".
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that two subprojects (rooted by their pyproject.toml files)
    // within one worktree initially share a single language server instance,
    // and that activating a different toolchain for one subproject spawns a
    // separate server instance for it.

    // Roots a Python subproject at the nearest ancestor directory that
    // contains a pyproject.toml file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walks up to `depth` ancestors of `path`, returning the first one
        // where pyproject.toml exists.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own pyproject.toml and .venv.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Toolchain discovery roots at project-b via its pyproject.toml.
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active until one is explicitly selected.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let settings_json_contents = json!({
1216 "languages": {
1217 "Rust": {
1218 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1219 }
1220 },
1221 "lsp": {
1222 "my_fake_lsp": {
1223 "binary": {
1224 // file exists, so this is treated as a relative path
1225 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1226 }
1227 },
1228 "lsp_on_path": {
1229 "binary": {
1230 // file doesn't exist, so it will fall back on PATH env var
1231 "path": path!("lsp_on_path.exe").to_string(),
1232 }
1233 }
1234 },
1235 });
1236
1237 let fs = FakeFs::new(cx.executor());
1238 fs.insert_tree(
1239 path!("/the-root"),
1240 json!({
1241 ".zed": {
1242 "settings.json": settings_json_contents.to_string(),
1243 },
1244 ".relative_path": {
1245 "to": {
1246 "my_fake_lsp.exe": "",
1247 },
1248 },
1249 "src": {
1250 "main.rs": "",
1251 }
1252 }),
1253 )
1254 .await;
1255
1256 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1257 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1258 language_registry.add(rust_lang());
1259
1260 let mut my_fake_lsp = language_registry.register_fake_lsp(
1261 "Rust",
1262 FakeLspAdapter {
1263 name: "my_fake_lsp",
1264 ..Default::default()
1265 },
1266 );
1267 let mut lsp_on_path = language_registry.register_fake_lsp(
1268 "Rust",
1269 FakeLspAdapter {
1270 name: "lsp_on_path",
1271 ..Default::default()
1272 },
1273 );
1274
1275 cx.run_until_parked();
1276
1277 // Start the language server by opening a buffer with a compatible file extension.
1278 project
1279 .update(cx, |project, cx| {
1280 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284
1285 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1286 assert_eq!(
1287 lsp_path.to_string_lossy(),
1288 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1289 );
1290
1291 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1292 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1293}
1294
1295#[gpui::test]
1296async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1297 init_test(cx);
1298
1299 let settings_json_contents = json!({
1300 "languages": {
1301 "Rust": {
1302 "language_servers": ["tilde_lsp"]
1303 }
1304 },
1305 "lsp": {
1306 "tilde_lsp": {
1307 "binary": {
1308 "path": "~/.local/bin/rust-analyzer",
1309 }
1310 }
1311 },
1312 });
1313
1314 let fs = FakeFs::new(cx.executor());
1315 fs.insert_tree(
1316 path!("/root"),
1317 json!({
1318 ".zed": {
1319 "settings.json": settings_json_contents.to_string(),
1320 },
1321 "src": {
1322 "main.rs": "fn main() {}",
1323 }
1324 }),
1325 )
1326 .await;
1327
1328 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1329 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1330 language_registry.add(rust_lang());
1331
1332 let mut tilde_lsp = language_registry.register_fake_lsp(
1333 "Rust",
1334 FakeLspAdapter {
1335 name: "tilde_lsp",
1336 ..Default::default()
1337 },
1338 );
1339 cx.run_until_parked();
1340
1341 project
1342 .update(cx, |project, cx| {
1343 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1344 })
1345 .await
1346 .unwrap();
1347
1348 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1349 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1350 assert_eq!(
1351 lsp_path, expected_path,
1352 "Tilde path should expand to home directory"
1353 );
1354}
1355
1356#[gpui::test]
1357async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1358 init_test(cx);
1359
1360 let fs = FakeFs::new(cx.executor());
1361 fs.insert_tree(
1362 path!("/the-root"),
1363 json!({
1364 ".gitignore": "target\n",
1365 "Cargo.lock": "",
1366 "src": {
1367 "a.rs": "",
1368 "b.rs": "",
1369 },
1370 "target": {
1371 "x": {
1372 "out": {
1373 "x.rs": ""
1374 }
1375 },
1376 "y": {
1377 "out": {
1378 "y.rs": "",
1379 }
1380 },
1381 "z": {
1382 "out": {
1383 "z.rs": ""
1384 }
1385 }
1386 }
1387 }),
1388 )
1389 .await;
1390 fs.insert_tree(
1391 path!("/the-registry"),
1392 json!({
1393 "dep1": {
1394 "src": {
1395 "dep1.rs": "",
1396 }
1397 },
1398 "dep2": {
1399 "src": {
1400 "dep2.rs": "",
1401 }
1402 },
1403 }),
1404 )
1405 .await;
1406 fs.insert_tree(
1407 path!("/the/stdlib"),
1408 json!({
1409 "LICENSE": "",
1410 "src": {
1411 "string.rs": "",
1412 }
1413 }),
1414 )
1415 .await;
1416
1417 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1418 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1419 (project.languages().clone(), project.lsp_store())
1420 });
1421 language_registry.add(rust_lang());
1422 let mut fake_servers = language_registry.register_fake_lsp(
1423 "Rust",
1424 FakeLspAdapter {
1425 name: "the-language-server",
1426 ..Default::default()
1427 },
1428 );
1429
1430 cx.executor().run_until_parked();
1431
1432 // Start the language server by opening a buffer with a compatible file extension.
1433 project
1434 .update(cx, |project, cx| {
1435 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1436 })
1437 .await
1438 .unwrap();
1439
1440 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1441 project.update(cx, |project, cx| {
1442 let worktree = project.worktrees(cx).next().unwrap();
1443 assert_eq!(
1444 worktree
1445 .read(cx)
1446 .snapshot()
1447 .entries(true, 0)
1448 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1449 .collect::<Vec<_>>(),
1450 &[
1451 ("", false),
1452 (".gitignore", false),
1453 ("Cargo.lock", false),
1454 ("src", false),
1455 ("src/a.rs", false),
1456 ("src/b.rs", false),
1457 ("target", true),
1458 ]
1459 );
1460 });
1461
1462 let prev_read_dir_count = fs.read_dir_call_count();
1463
1464 let fake_server = fake_servers.next().await.unwrap();
1465 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1466 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1467 id
1468 });
1469
1470 // Simulate jumping to a definition in a dependency outside of the worktree.
1471 let _out_of_worktree_buffer = project
1472 .update(cx, |project, cx| {
1473 project.open_local_buffer_via_lsp(
1474 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1475 server_id,
1476 cx,
1477 )
1478 })
1479 .await
1480 .unwrap();
1481
1482 // Keep track of the FS events reported to the language server.
1483 let file_changes = Arc::new(Mutex::new(Vec::new()));
1484 fake_server
1485 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1486 registrations: vec![lsp::Registration {
1487 id: Default::default(),
1488 method: "workspace/didChangeWatchedFiles".to_string(),
1489 register_options: serde_json::to_value(
1490 lsp::DidChangeWatchedFilesRegistrationOptions {
1491 watchers: vec![
1492 lsp::FileSystemWatcher {
1493 glob_pattern: lsp::GlobPattern::String(
1494 path!("/the-root/Cargo.toml").to_string(),
1495 ),
1496 kind: None,
1497 },
1498 lsp::FileSystemWatcher {
1499 glob_pattern: lsp::GlobPattern::String(
1500 path!("/the-root/src/*.{rs,c}").to_string(),
1501 ),
1502 kind: None,
1503 },
1504 lsp::FileSystemWatcher {
1505 glob_pattern: lsp::GlobPattern::String(
1506 path!("/the-root/target/y/**/*.rs").to_string(),
1507 ),
1508 kind: None,
1509 },
1510 lsp::FileSystemWatcher {
1511 glob_pattern: lsp::GlobPattern::String(
1512 path!("/the/stdlib/src/**/*.rs").to_string(),
1513 ),
1514 kind: None,
1515 },
1516 lsp::FileSystemWatcher {
1517 glob_pattern: lsp::GlobPattern::String(
1518 path!("**/Cargo.lock").to_string(),
1519 ),
1520 kind: None,
1521 },
1522 ],
1523 },
1524 )
1525 .ok(),
1526 }],
1527 })
1528 .await
1529 .into_response()
1530 .unwrap();
1531 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1532 let file_changes = file_changes.clone();
1533 move |params, _| {
1534 let mut file_changes = file_changes.lock();
1535 file_changes.extend(params.changes);
1536 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1537 }
1538 });
1539
1540 cx.executor().run_until_parked();
1541 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1542 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1543
1544 let mut new_watched_paths = fs.watched_paths();
1545 new_watched_paths.retain(|path| {
1546 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1547 });
1548 assert_eq!(
1549 &new_watched_paths,
1550 &[
1551 Path::new(path!("/the-root")),
1552 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1553 Path::new(path!("/the/stdlib/src"))
1554 ]
1555 );
1556
1557 // Now the language server has asked us to watch an ignored directory path,
1558 // so we recursively load it.
1559 project.update(cx, |project, cx| {
1560 let worktree = project.visible_worktrees(cx).next().unwrap();
1561 assert_eq!(
1562 worktree
1563 .read(cx)
1564 .snapshot()
1565 .entries(true, 0)
1566 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1567 .collect::<Vec<_>>(),
1568 &[
1569 ("", false),
1570 (".gitignore", false),
1571 ("Cargo.lock", false),
1572 ("src", false),
1573 ("src/a.rs", false),
1574 ("src/b.rs", false),
1575 ("target", true),
1576 ("target/x", true),
1577 ("target/y", true),
1578 ("target/y/out", true),
1579 ("target/y/out/y.rs", true),
1580 ("target/z", true),
1581 ]
1582 );
1583 });
1584
1585 // Perform some file system mutations, two of which match the watched patterns,
1586 // and one of which does not.
1587 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1588 .await
1589 .unwrap();
1590 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1591 .await
1592 .unwrap();
1593 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1594 .await
1595 .unwrap();
1596 fs.create_file(
1597 path!("/the-root/target/x/out/x2.rs").as_ref(),
1598 Default::default(),
1599 )
1600 .await
1601 .unwrap();
1602 fs.create_file(
1603 path!("/the-root/target/y/out/y2.rs").as_ref(),
1604 Default::default(),
1605 )
1606 .await
1607 .unwrap();
1608 fs.save(
1609 path!("/the-root/Cargo.lock").as_ref(),
1610 &"".into(),
1611 Default::default(),
1612 )
1613 .await
1614 .unwrap();
1615 fs.save(
1616 path!("/the-stdlib/LICENSE").as_ref(),
1617 &"".into(),
1618 Default::default(),
1619 )
1620 .await
1621 .unwrap();
1622 fs.save(
1623 path!("/the/stdlib/src/string.rs").as_ref(),
1624 &"".into(),
1625 Default::default(),
1626 )
1627 .await
1628 .unwrap();
1629
1630 // The language server receives events for the FS mutations that match its watch patterns.
1631 cx.executor().run_until_parked();
1632 assert_eq!(
1633 &*file_changes.lock(),
1634 &[
1635 lsp::FileEvent {
1636 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1637 typ: lsp::FileChangeType::CHANGED,
1638 },
1639 lsp::FileEvent {
1640 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1641 typ: lsp::FileChangeType::DELETED,
1642 },
1643 lsp::FileEvent {
1644 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1645 typ: lsp::FileChangeType::CREATED,
1646 },
1647 lsp::FileEvent {
1648 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1649 typ: lsp::FileChangeType::CREATED,
1650 },
1651 lsp::FileEvent {
1652 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1653 typ: lsp::FileChangeType::CHANGED,
1654 },
1655 ]
1656 );
1657}
1658
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed diagnostics are routed to the correct buffer when
    // each open file lives in its own single-file worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees instead of a directory.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file from the same (simulated) server id:
    // an ERROR on `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer surfaces only the diagnostic published for its own URI.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence around a disk-based diagnostics
    // progress cycle: ServerAdded -> DiskBasedDiagnosticsStarted ->
    // DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, and that publishing
    // empty diagnostics twice yields only one update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token marks work items as "disk-based diagnostics".
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token emits the "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not even open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress on the token emits the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the file afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second empty publish is a no-op: no further events are emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in a
    // "diagnostics running" state: the new server's progress cycle fully
    // supersedes the old server's unfinished one.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, then the replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2145
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server clears the diagnostics the
    // old server instance had published, both from the buffer and from the
    // project-level summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2226
// Regression test: a language server may publish diagnostics tagged with a
// buffer version Zed has never sent it. After the server is restarted, the
// re-opened document must start back at version 0 rather than carrying over
// any stale version state.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    // Opening the buffer with LSP support starts the first fake server.
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server receives a fresh `didOpen` whose version is 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
2266
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification — and only for progress tokens
// that the server marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // A non-cancellable token: the cancel request must not target this one.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // A cancellable token: this is the one that should actually be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token's work is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2331
// Verifies that toggling the `enable_language_server` language setting starts
// and stops the corresponding server, without affecting servers registered
// for other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register one fake server per language so each can be observed
    // starting/stopping independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Both servers start and are told about their respective buffers.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second Rust server instance comes up and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2449
// Verifies that diagnostics published against an older buffer version are
// translated through subsequent edits: ranges move with the text, overlapping
// diagnostics highlight correctly, and out-of-order `publishDiagnostics`
// payloads are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Rows shift by 2 because of the "\n\n" inserted at the top.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // A warning whose range contains the error's range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // The error takes display precedence where the two ranges overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2741
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover an adjacent character so it remains visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two zero-width diagnostics directly into the LSP store: one
    // mid-line (before the `;`) and one at the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2817
// Verifies that diagnostics pushed by different language servers for the same
// path are tracked independently and both contribute to the summary counts.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...as server 1; neither entry should replace the other.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2878
// Verifies that `edits_from_lsp` correctly rebases edits that a language
// server computed against an OLDER document version: the buffer is edited
// after the server's snapshot, and the server's edits must still land in the
// right places when applied to the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at `didOpen`; the edits
    // below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The LSP edits use coordinates from the OLD version of the document.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the interleaved local edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
3033
// Verifies that `edits_from_lsp` minimizes a large "rewrite the whole file"
// style diff (as rust-analyzer produces for merge-imports) down to the small
// set of actual changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big diff is reduced to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3144
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair: an
// insertion at the same position as (and listed after) a replacement. Both
// edits must still be applied sensibly.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (identity) replacement.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3200
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, inverted ranges (end before start), and ranges pointing past the end
// of the document are all normalized to valid, minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed batch collapses to two well-formed, ordered edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3307
3308fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3309 buffer: &Buffer,
3310 range: Range<T>,
3311) -> Vec<(String, Option<DiagnosticSeverity>)> {
3312 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3313 for chunk in buffer.snapshot().chunks(range, true) {
3314 if chunks
3315 .last()
3316 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3317 {
3318 chunks.last_mut().unwrap().0.push_str(chunk.text);
3319 } else {
3320 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3321 }
3322 }
3323 chunks
3324}
3325
// Verifies go-to-definition into a file outside the project: the target file
// is loaded into an invisible worktree that is reused by the existing language
// server (no new server starts) and is released when the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` is outside of it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` is held in an invisible (`false`) worktree while the
        // definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: lists each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3424
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on a fake file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion
    // support with "." as a trigger character.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer with LSP support starts the fake server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler installed below will
    // serve it once `.next().await` is reached.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with one item whose text_edit replaces the trailing "fqn"
    // (the last 3 characters of `text`) with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the new text and the replace range must come from the item's
    // text_edit, not from insert_text or label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3507
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on a fake file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion
    // support with "." as a trigger character.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the request before installing the handler; `.next().await`
        // below drives the handler through one request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // The list-level `item_defaults.edit_range` covers the trailing 3
        // characters; the item supplies only `text_edit_text`.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is inserted over the defaulted edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no per-item edit text, the label (not insert_text) is applied
        // over the defaulted edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3644
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on a fake file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server advertising completion
    // support (trigger character ":").
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request before installing the handler; `.next().await` below
    // drives the handler through one request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, and the replace range is inferred from the word
    // under the cursor ("fqn", 3 characters).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the new text, replacing the word fragment before
    // the cursor ("cmp", 3 characters ending one before the buffer's end).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3750
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on a fake file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server advertising completion
    // support (trigger character ":").
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request before installing the handler; `.next().await` below
    // drives the handler through one request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text contains both a bare "\r" and a "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" line endings are normalized to "\n" in the
    // resulting completion text.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3818
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One TypeScript file containing just "a".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake server that supports code actions (with lazy resolve)
    // and a single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server-initiated edit inserts "X" at the start of
                    // the document.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit was applied to the buffer and is undoable as one step.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3960
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Look up the worktree and the entry id of the file to be moved.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a directory hierarchy that does not exist yet;
    // all intermediate directories should be created.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The move gave the file a new entry id; look it up again.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4068
4069#[gpui::test(iterations = 10)]
4070async fn test_save_file(cx: &mut gpui::TestAppContext) {
4071 init_test(cx);
4072
4073 let fs = FakeFs::new(cx.executor());
4074 fs.insert_tree(
4075 path!("/dir"),
4076 json!({
4077 "file1": "the old contents",
4078 }),
4079 )
4080 .await;
4081
4082 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4083 let buffer = project
4084 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4085 .await
4086 .unwrap();
4087 buffer.update(cx, |buffer, cx| {
4088 assert_eq!(buffer.text(), "the old contents");
4089 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4090 });
4091
4092 project
4093 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4094 .await
4095 .unwrap();
4096
4097 let new_text = fs
4098 .load(Path::new(path!("/dir/file1")))
4099 .await
4100 .unwrap()
4101 .replace("\r\n", "\n");
4102 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4103}
4104
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Empty project directory; the Rust file will be created by saving.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust language server; it should only start once a
    // Rust file exists in the project.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so registering it with
    // language servers must not start any server.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving the buffer under a `.rs` path gives it the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4184
4185#[gpui::test(iterations = 30)]
4186async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4187 init_test(cx);
4188
4189 let fs = FakeFs::new(cx.executor());
4190 fs.insert_tree(
4191 path!("/dir"),
4192 json!({
4193 "file1": "the original contents",
4194 }),
4195 )
4196 .await;
4197
4198 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4199 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4200 let buffer = project
4201 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4202 .await
4203 .unwrap();
4204
4205 // Simulate buffer diffs being slow, so that they don't complete before
4206 // the next file change occurs.
4207 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4208
4209 // Change the buffer's file on disk, and then wait for the file change
4210 // to be detected by the worktree, so that the buffer starts reloading.
4211 fs.save(
4212 path!("/dir/file1").as_ref(),
4213 &"the first contents".into(),
4214 Default::default(),
4215 )
4216 .await
4217 .unwrap();
4218 worktree.next_event(cx).await;
4219
4220 // Change the buffer's file again. Depending on the random seed, the
4221 // previous file change may still be in progress.
4222 fs.save(
4223 path!("/dir/file1").as_ref(),
4224 &"the second contents".into(),
4225 Default::default(),
4226 )
4227 .await
4228 .unwrap();
4229 worktree.next_event(cx).await;
4230
4231 cx.executor().run_until_parked();
4232 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4233 buffer.read_with(cx, |buffer, _| {
4234 assert_eq!(buffer.text(), on_disk_text);
4235 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4236 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4237 });
4238}
4239
4240#[gpui::test(iterations = 30)]
4241async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4242 init_test(cx);
4243
4244 let fs = FakeFs::new(cx.executor());
4245 fs.insert_tree(
4246 path!("/dir"),
4247 json!({
4248 "file1": "the original contents",
4249 }),
4250 )
4251 .await;
4252
4253 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4254 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4255 let buffer = project
4256 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4257 .await
4258 .unwrap();
4259
4260 // Simulate buffer diffs being slow, so that they don't complete before
4261 // the next file change occurs.
4262 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4263
4264 // Change the buffer's file on disk, and then wait for the file change
4265 // to be detected by the worktree, so that the buffer starts reloading.
4266 fs.save(
4267 path!("/dir/file1").as_ref(),
4268 &"the first contents".into(),
4269 Default::default(),
4270 )
4271 .await
4272 .unwrap();
4273 worktree.next_event(cx).await;
4274
4275 cx.executor()
4276 .spawn(cx.executor().simulate_random_delay())
4277 .await;
4278
4279 // Perform a noop edit, causing the buffer's version to increase.
4280 buffer.update(cx, |buffer, cx| {
4281 buffer.edit([(0..0, " ")], None, cx);
4282 buffer.undo(cx);
4283 });
4284
4285 cx.executor().run_until_parked();
4286 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4287 buffer.read_with(cx, |buffer, _| {
4288 let buffer_text = buffer.text();
4289 if buffer_text == on_disk_text {
4290 assert!(
4291 !buffer.is_dirty() && !buffer.has_conflict(),
4292 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4293 );
4294 }
4295 // If the file change occurred while the buffer was processing the first
4296 // change, the buffer will be in a conflicting state.
4297 else {
4298 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4299 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4300 }
4301 });
4302}
4303
4304#[gpui::test]
4305async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4306 init_test(cx);
4307
4308 let fs = FakeFs::new(cx.executor());
4309 fs.insert_tree(
4310 path!("/dir"),
4311 json!({
4312 "file1": "the old contents",
4313 }),
4314 )
4315 .await;
4316
4317 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4318 let buffer = project
4319 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4320 .await
4321 .unwrap();
4322 buffer.update(cx, |buffer, cx| {
4323 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4324 });
4325
4326 project
4327 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4328 .await
4329 .unwrap();
4330
4331 let new_text = fs
4332 .load(Path::new(path!("/dir/file1")))
4333 .await
4334 .unwrap()
4335 .replace("\r\n", "\n");
4336 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4337}
4338
4339#[gpui::test]
4340async fn test_save_as(cx: &mut gpui::TestAppContext) {
4341 init_test(cx);
4342
4343 let fs = FakeFs::new(cx.executor());
4344 fs.insert_tree("/dir", json!({})).await;
4345
4346 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4347
4348 let languages = project.update(cx, |project, _| project.languages().clone());
4349 languages.add(rust_lang());
4350
4351 let buffer = project.update(cx, |project, cx| {
4352 project.create_local_buffer("", None, false, cx)
4353 });
4354 buffer.update(cx, |buffer, cx| {
4355 buffer.edit([(0..0, "abc")], None, cx);
4356 assert!(buffer.is_dirty());
4357 assert!(!buffer.has_conflict());
4358 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4359 });
4360 project
4361 .update(cx, |project, cx| {
4362 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4363 let path = ProjectPath {
4364 worktree_id,
4365 path: rel_path("file1.rs").into(),
4366 };
4367 project.save_buffer_as(buffer.clone(), path, cx)
4368 })
4369 .await
4370 .unwrap();
4371 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4372
4373 cx.executor().run_until_parked();
4374 buffer.update(cx, |buffer, cx| {
4375 assert_eq!(
4376 buffer.file().unwrap().full_path(cx),
4377 Path::new("dir/file1.rs")
4378 );
4379 assert!(!buffer.is_dirty());
4380 assert!(!buffer.has_conflict());
4381 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4382 });
4383
4384 let opened_buffer = project
4385 .update(cx, |project, cx| {
4386 project.open_local_buffer("/dir/file1.rs", cx)
4387 })
4388 .await
4389 .unwrap();
4390 assert_eq!(opened_buffer, buffer);
4391}
4392
4393#[gpui::test]
4394async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4395 init_test(cx);
4396
4397 let fs = FakeFs::new(cx.executor());
4398 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4399
4400 fs.insert_tree(
4401 path!("/dir"),
4402 json!({
4403 "data_a.txt": "data about a"
4404 }),
4405 )
4406 .await;
4407
4408 let buffer = project
4409 .update(cx, |project, cx| {
4410 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4411 })
4412 .await
4413 .unwrap();
4414
4415 buffer.update(cx, |buffer, cx| {
4416 buffer.edit([(11..12, "b")], None, cx);
4417 });
4418
4419 // Save buffer's contents as a new file and confirm that the buffer's now
4420 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4421 // file associated with the buffer has now been updated to `data_b.txt`
4422 project
4423 .update(cx, |project, cx| {
4424 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4425 let new_path = ProjectPath {
4426 worktree_id,
4427 path: rel_path("data_b.txt").into(),
4428 };
4429
4430 project.save_buffer_as(buffer.clone(), new_path, cx)
4431 })
4432 .await
4433 .unwrap();
4434
4435 buffer.update(cx, |buffer, cx| {
4436 assert_eq!(
4437 buffer.file().unwrap().full_path(cx),
4438 Path::new("dir/data_b.txt")
4439 )
4440 });
4441
4442 // Open the original `data_a.txt` file, confirming that its contents are
4443 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4444 let original_buffer = project
4445 .update(cx, |project, cx| {
4446 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4447 })
4448 .await
4449 .unwrap();
4450
4451 original_buffer.update(cx, |buffer, cx| {
4452 assert_eq!(buffer.text(), "data about a");
4453 assert_eq!(
4454 buffer.file().unwrap().full_path(cx),
4455 Path::new("dir/data_a.txt")
4456 )
4457 });
4458}
4459
// End-to-end check that file-system renames and deletions are picked up by a
// rescan of the local worktree, that entry ids and open-buffer file
// associations survive those renames, and that a remote worktree replica
// converges to the same state when fed the recorded update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real file system and its watcher, so the
    // deterministic test executor must be allowed to block.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id for a relative path, panicking if the
    // entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Entry ids captured before the renames; they must be stable afterwards.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the renames/deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Renamed entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while the deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4627
4628#[gpui::test(iterations = 10)]
4629async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4630 init_test(cx);
4631
4632 let fs = FakeFs::new(cx.executor());
4633 fs.insert_tree(
4634 path!("/dir"),
4635 json!({
4636 "a": {
4637 "file1": "",
4638 }
4639 }),
4640 )
4641 .await;
4642
4643 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4644 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4645 let tree_id = tree.update(cx, |tree, _| tree.id());
4646
4647 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4648 project.update(cx, |project, cx| {
4649 let tree = project.worktrees(cx).next().unwrap();
4650 tree.read(cx)
4651 .entry_for_path(rel_path(path))
4652 .unwrap_or_else(|| panic!("no entry for path {}", path))
4653 .id
4654 })
4655 };
4656
4657 let dir_id = id_for_path("a", cx);
4658 let file_id = id_for_path("a/file1", cx);
4659 let buffer = project
4660 .update(cx, |p, cx| {
4661 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4662 })
4663 .await
4664 .unwrap();
4665 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4666
4667 project
4668 .update(cx, |project, cx| {
4669 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4670 })
4671 .unwrap()
4672 .await
4673 .into_included()
4674 .unwrap();
4675 cx.executor().run_until_parked();
4676
4677 assert_eq!(id_for_path("b", cx), dir_id);
4678 assert_eq!(id_for_path("b/file1", cx), file_id);
4679 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4680}
4681
4682#[gpui::test]
4683async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4684 init_test(cx);
4685
4686 let fs = FakeFs::new(cx.executor());
4687 fs.insert_tree(
4688 "/dir",
4689 json!({
4690 "a.txt": "a-contents",
4691 "b.txt": "b-contents",
4692 }),
4693 )
4694 .await;
4695
4696 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4697
4698 // Spawn multiple tasks to open paths, repeating some paths.
4699 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4700 (
4701 p.open_local_buffer("/dir/a.txt", cx),
4702 p.open_local_buffer("/dir/b.txt", cx),
4703 p.open_local_buffer("/dir/a.txt", cx),
4704 )
4705 });
4706
4707 let buffer_a_1 = buffer_a_1.await.unwrap();
4708 let buffer_a_2 = buffer_a_2.await.unwrap();
4709 let buffer_b = buffer_b.await.unwrap();
4710 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4711 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4712
4713 // There is only one buffer per path.
4714 let buffer_a_id = buffer_a_1.entity_id();
4715 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4716
4717 // Open the same path again while it is still open.
4718 drop(buffer_a_1);
4719 let buffer_a_3 = project
4720 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4721 .await
4722 .unwrap();
4723
4724 // There's still only one buffer per path.
4725 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4726}
4727
// Exercises buffer dirty-state tracking and the exact event sequences it
// emits: edits mark a buffer dirty, saving clears it, restoring the saved
// content clears it, and deleting the file on disk interacts with dirtiness
// in specific ways (clean buffers stay clean; dirty buffers stay dirty; an
// emptied buffer of a deleted file counts as clean).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every event except the low-level `Operation` events, which
        // are not under test here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and the file's
        // on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits toggles the dirty
        // flag, so only one DirtyChanged appears between the Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4909
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// is reloaded by applying a diff of old vs. new contents (so anchors stay
// attached to the corresponding text), while a dirty buffer keeps its local
// edits and is flagged as having a conflict instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record offsets whose anchors should survive the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits and now land on the
        // corresponding marked offsets in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4992
// Buffers normalize their in-memory text to `\n` while remembering the file's
// original line-ending style; saving re-applies that style on disk, and a
// style change on disk updates the buffer's recorded line ending.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized to `\n` in memory, but the Windows
    // line ending is remembered.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
5054
// Verifies that pushed LSP diagnostics whose `related_information` entries
// cross-reference each other are grouped: each primary diagnostic and its
// supplemental hints share a `group_id`, and `diagnostic_group` returns all
// members of a group ordered by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical groups: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). The hints point back at their primary diagnostic
    // via related_information, and vice versa.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position; group 1 is "error 1" and its
    // hint, group 0 is "error 2" and its two hints.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 in isolation: both hints plus the primary error, by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the warning and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5314
// Verifies the file-rename handshake with a language server that registers
// fileOperations capabilities: renaming a worktree entry sends
// `workspace/willRenameFiles` (whose returned WorkspaceEdit is applied) and,
// after the rename, a `workspace/didRenameFiles` notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server declares interest in renames of `.rs` files and of folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs; the resulting future resolves
    // only after the will-rename round trip below completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Edit the server will return from willRenameFiles; the client is expected
    // to apply it before performing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once by the willRenameFiles handler, proving it ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5450
// Verifies symbol rename through LSP: `prepare_rename` resolves the renameable
// range via `textDocument/prepareRename`, and `perform_rename` applies a
// multi-file WorkspaceEdit returned by `textDocument/rename`.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the fake server reports
    // the renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's edit touches both files, with two
    // occurrences in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers with the rename
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5590
// Project-wide text search: matches are found in files on disk, and unsaved
// edits in open buffers are searched instead of the stale on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word, case-insensitive search for "TWO" hits the definition in
    // two.rs and the reference in three.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Introduce two new occurrences of "TWO" in four.rs via unsaved buffer
    // edits; the search must pick them up without a save.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5667
5668#[gpui::test]
5669async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5670 init_test(cx);
5671
5672 let search_query = "file";
5673
5674 let fs = FakeFs::new(cx.executor());
5675 fs.insert_tree(
5676 path!("/dir"),
5677 json!({
5678 "one.rs": r#"// Rust file one"#,
5679 "one.ts": r#"// TypeScript file one"#,
5680 "two.rs": r#"// Rust file two"#,
5681 "two.ts": r#"// TypeScript file two"#,
5682 }),
5683 )
5684 .await;
5685 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5686
5687 assert!(
5688 search(
5689 &project,
5690 SearchQuery::text(
5691 search_query,
5692 false,
5693 true,
5694 false,
5695 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5696 Default::default(),
5697 false,
5698 None
5699 )
5700 .unwrap(),
5701 cx
5702 )
5703 .await
5704 .unwrap()
5705 .is_empty(),
5706 "If no inclusions match, no files should be returned"
5707 );
5708
5709 assert_eq!(
5710 search(
5711 &project,
5712 SearchQuery::text(
5713 search_query,
5714 false,
5715 true,
5716 false,
5717 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5718 Default::default(),
5719 false,
5720 None
5721 )
5722 .unwrap(),
5723 cx
5724 )
5725 .await
5726 .unwrap(),
5727 HashMap::from_iter([
5728 (path!("dir/one.rs").to_string(), vec![8..12]),
5729 (path!("dir/two.rs").to_string(), vec![8..12]),
5730 ]),
5731 "Rust only search should give only Rust files"
5732 );
5733
5734 assert_eq!(
5735 search(
5736 &project,
5737 SearchQuery::text(
5738 search_query,
5739 false,
5740 true,
5741 false,
5742 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5743 .unwrap(),
5744 Default::default(),
5745 false,
5746 None,
5747 )
5748 .unwrap(),
5749 cx
5750 )
5751 .await
5752 .unwrap(),
5753 HashMap::from_iter([
5754 (path!("dir/one.ts").to_string(), vec![14..18]),
5755 (path!("dir/two.ts").to_string(), vec![14..18]),
5756 ]),
5757 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5758 );
5759
5760 assert_eq!(
5761 search(
5762 &project,
5763 SearchQuery::text(
5764 search_query,
5765 false,
5766 true,
5767 false,
5768 PathMatcher::new(
5769 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5770 PathStyle::local()
5771 )
5772 .unwrap(),
5773 Default::default(),
5774 false,
5775 None,
5776 )
5777 .unwrap(),
5778 cx
5779 )
5780 .await
5781 .unwrap(),
5782 HashMap::from_iter([
5783 (path!("dir/two.ts").to_string(), vec![14..18]),
5784 (path!("dir/one.rs").to_string(), vec![8..12]),
5785 (path!("dir/one.ts").to_string(), vec![14..18]),
5786 (path!("dir/two.rs").to_string(), vec![8..12]),
5787 ]),
5788 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5789 );
5790}
5791
5792#[gpui::test]
5793async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5794 init_test(cx);
5795
5796 let search_query = "file";
5797
5798 let fs = FakeFs::new(cx.executor());
5799 fs.insert_tree(
5800 path!("/dir"),
5801 json!({
5802 "one.rs": r#"// Rust file one"#,
5803 "one.ts": r#"// TypeScript file one"#,
5804 "two.rs": r#"// Rust file two"#,
5805 "two.ts": r#"// TypeScript file two"#,
5806 }),
5807 )
5808 .await;
5809 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5810
5811 assert_eq!(
5812 search(
5813 &project,
5814 SearchQuery::text(
5815 search_query,
5816 false,
5817 true,
5818 false,
5819 Default::default(),
5820 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5821 false,
5822 None,
5823 )
5824 .unwrap(),
5825 cx
5826 )
5827 .await
5828 .unwrap(),
5829 HashMap::from_iter([
5830 (path!("dir/one.rs").to_string(), vec![8..12]),
5831 (path!("dir/one.ts").to_string(), vec![14..18]),
5832 (path!("dir/two.rs").to_string(), vec![8..12]),
5833 (path!("dir/two.ts").to_string(), vec![14..18]),
5834 ]),
5835 "If no exclusions match, all files should be returned"
5836 );
5837
5838 assert_eq!(
5839 search(
5840 &project,
5841 SearchQuery::text(
5842 search_query,
5843 false,
5844 true,
5845 false,
5846 Default::default(),
5847 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5848 false,
5849 None,
5850 )
5851 .unwrap(),
5852 cx
5853 )
5854 .await
5855 .unwrap(),
5856 HashMap::from_iter([
5857 (path!("dir/one.ts").to_string(), vec![14..18]),
5858 (path!("dir/two.ts").to_string(), vec![14..18]),
5859 ]),
5860 "Rust exclusion search should give only TypeScript files"
5861 );
5862
5863 assert_eq!(
5864 search(
5865 &project,
5866 SearchQuery::text(
5867 search_query,
5868 false,
5869 true,
5870 false,
5871 Default::default(),
5872 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5873 .unwrap(),
5874 false,
5875 None,
5876 )
5877 .unwrap(),
5878 cx
5879 )
5880 .await
5881 .unwrap(),
5882 HashMap::from_iter([
5883 (path!("dir/one.rs").to_string(), vec![8..12]),
5884 (path!("dir/two.rs").to_string(), vec![8..12]),
5885 ]),
5886 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5887 );
5888
5889 assert!(
5890 search(
5891 &project,
5892 SearchQuery::text(
5893 search_query,
5894 false,
5895 true,
5896 false,
5897 Default::default(),
5898 PathMatcher::new(
5899 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5900 PathStyle::local(),
5901 )
5902 .unwrap(),
5903 false,
5904 None,
5905 )
5906 .unwrap(),
5907 cx
5908 )
5909 .await
5910 .unwrap()
5911 .is_empty(),
5912 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5913 );
5914}
5915
5916#[gpui::test]
5917async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5918 init_test(cx);
5919
5920 let search_query = "file";
5921
5922 let fs = FakeFs::new(cx.executor());
5923 fs.insert_tree(
5924 path!("/dir"),
5925 json!({
5926 "one.rs": r#"// Rust file one"#,
5927 "one.ts": r#"// TypeScript file one"#,
5928 "two.rs": r#"// Rust file two"#,
5929 "two.ts": r#"// TypeScript file two"#,
5930 }),
5931 )
5932 .await;
5933
5934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5935 let path_style = PathStyle::local();
5936 let _buffer = project.update(cx, |project, cx| {
5937 project.create_local_buffer("file", None, false, cx)
5938 });
5939
5940 assert_eq!(
5941 search(
5942 &project,
5943 SearchQuery::text(
5944 search_query,
5945 false,
5946 true,
5947 false,
5948 Default::default(),
5949 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5950 false,
5951 None,
5952 )
5953 .unwrap(),
5954 cx
5955 )
5956 .await
5957 .unwrap(),
5958 HashMap::from_iter([
5959 (path!("dir/one.rs").to_string(), vec![8..12]),
5960 (path!("dir/one.ts").to_string(), vec![14..18]),
5961 (path!("dir/two.rs").to_string(), vec![8..12]),
5962 (path!("dir/two.ts").to_string(), vec![14..18]),
5963 ]),
5964 "If no exclusions match, all files should be returned"
5965 );
5966
5967 assert_eq!(
5968 search(
5969 &project,
5970 SearchQuery::text(
5971 search_query,
5972 false,
5973 true,
5974 false,
5975 Default::default(),
5976 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5977 false,
5978 None,
5979 )
5980 .unwrap(),
5981 cx
5982 )
5983 .await
5984 .unwrap(),
5985 HashMap::from_iter([
5986 (path!("dir/one.ts").to_string(), vec![14..18]),
5987 (path!("dir/two.ts").to_string(), vec![14..18]),
5988 ]),
5989 "Rust exclusion search should give only TypeScript files"
5990 );
5991
5992 assert_eq!(
5993 search(
5994 &project,
5995 SearchQuery::text(
5996 search_query,
5997 false,
5998 true,
5999 false,
6000 Default::default(),
6001 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6002 false,
6003 None,
6004 )
6005 .unwrap(),
6006 cx
6007 )
6008 .await
6009 .unwrap(),
6010 HashMap::from_iter([
6011 (path!("dir/one.rs").to_string(), vec![8..12]),
6012 (path!("dir/two.rs").to_string(), vec![8..12]),
6013 ]),
6014 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6015 );
6016
6017 assert!(
6018 search(
6019 &project,
6020 SearchQuery::text(
6021 search_query,
6022 false,
6023 true,
6024 false,
6025 Default::default(),
6026 PathMatcher::new(
6027 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6028 PathStyle::local(),
6029 )
6030 .unwrap(),
6031 false,
6032 None,
6033 )
6034 .unwrap(),
6035 cx
6036 )
6037 .await
6038 .unwrap()
6039 .is_empty(),
6040 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6041 );
6042}
6043
6044#[gpui::test]
6045async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6046 init_test(cx);
6047
6048 let search_query = "file";
6049
6050 let fs = FakeFs::new(cx.executor());
6051 fs.insert_tree(
6052 path!("/dir"),
6053 json!({
6054 "one.rs": r#"// Rust file one"#,
6055 "one.ts": r#"// TypeScript file one"#,
6056 "two.rs": r#"// Rust file two"#,
6057 "two.ts": r#"// TypeScript file two"#,
6058 }),
6059 )
6060 .await;
6061 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6062 assert!(
6063 search(
6064 &project,
6065 SearchQuery::text(
6066 search_query,
6067 false,
6068 true,
6069 false,
6070 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6071 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6072 false,
6073 None,
6074 )
6075 .unwrap(),
6076 cx
6077 )
6078 .await
6079 .unwrap()
6080 .is_empty(),
6081 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6082 );
6083
6084 assert!(
6085 search(
6086 &project,
6087 SearchQuery::text(
6088 search_query,
6089 false,
6090 true,
6091 false,
6092 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6093 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6094 false,
6095 None,
6096 )
6097 .unwrap(),
6098 cx
6099 )
6100 .await
6101 .unwrap()
6102 .is_empty(),
6103 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6104 );
6105
6106 assert!(
6107 search(
6108 &project,
6109 SearchQuery::text(
6110 search_query,
6111 false,
6112 true,
6113 false,
6114 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6115 .unwrap(),
6116 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6117 .unwrap(),
6118 false,
6119 None,
6120 )
6121 .unwrap(),
6122 cx
6123 )
6124 .await
6125 .unwrap()
6126 .is_empty(),
6127 "Non-matching inclusions and exclusions should not change that."
6128 );
6129
6130 assert_eq!(
6131 search(
6132 &project,
6133 SearchQuery::text(
6134 search_query,
6135 false,
6136 true,
6137 false,
6138 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6139 .unwrap(),
6140 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6141 .unwrap(),
6142 false,
6143 None,
6144 )
6145 .unwrap(),
6146 cx
6147 )
6148 .await
6149 .unwrap(),
6150 HashMap::from_iter([
6151 (path!("dir/one.ts").to_string(), vec![14..18]),
6152 (path!("dir/two.ts").to_string(), vec![14..18]),
6153 ]),
6154 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6155 );
6156}
6157
6158#[gpui::test]
6159async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6160 init_test(cx);
6161
6162 let fs = FakeFs::new(cx.executor());
6163 fs.insert_tree(
6164 path!("/worktree-a"),
6165 json!({
6166 "haystack.rs": r#"// NEEDLE"#,
6167 "haystack.ts": r#"// NEEDLE"#,
6168 }),
6169 )
6170 .await;
6171 fs.insert_tree(
6172 path!("/worktree-b"),
6173 json!({
6174 "haystack.rs": r#"// NEEDLE"#,
6175 "haystack.ts": r#"// NEEDLE"#,
6176 }),
6177 )
6178 .await;
6179
6180 let path_style = PathStyle::local();
6181 let project = Project::test(
6182 fs.clone(),
6183 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6184 cx,
6185 )
6186 .await;
6187
6188 assert_eq!(
6189 search(
6190 &project,
6191 SearchQuery::text(
6192 "NEEDLE",
6193 false,
6194 true,
6195 false,
6196 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6197 Default::default(),
6198 true,
6199 None,
6200 )
6201 .unwrap(),
6202 cx
6203 )
6204 .await
6205 .unwrap(),
6206 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6207 "should only return results from included worktree"
6208 );
6209 assert_eq!(
6210 search(
6211 &project,
6212 SearchQuery::text(
6213 "NEEDLE",
6214 false,
6215 true,
6216 false,
6217 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6218 Default::default(),
6219 true,
6220 None,
6221 )
6222 .unwrap(),
6223 cx
6224 )
6225 .await
6226 .unwrap(),
6227 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6228 "should only return results from included worktree"
6229 );
6230
6231 assert_eq!(
6232 search(
6233 &project,
6234 SearchQuery::text(
6235 "NEEDLE",
6236 false,
6237 true,
6238 false,
6239 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6240 Default::default(),
6241 false,
6242 None,
6243 )
6244 .unwrap(),
6245 cx
6246 )
6247 .await
6248 .unwrap(),
6249 HashMap::from_iter([
6250 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6251 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6252 ]),
6253 "should return results from both worktrees"
6254 );
6255}
6256
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree containing a git repo whose `.gitignore` hides `target/` and
    // `node_modules/`; every hidden file also contains the query.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: gitignored directories are skipped entirely, so only the
    // top-level package.json is reported.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): each of the remaining queries runs against a freshly
    // created project — presumably to start from a clean worktree scan;
    // confirm whether reusing the first project would also be valid.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the fourth argument flipped to `true`: the differing
    // results below show it enables searching inside ignored directories.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion globs still apply inside ignored directories: only
    // the ignored prettier directory is searched, and its TS file is filtered
    // out by the exclusion.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6381
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive search for a Cyrillic needle stays a plain Text query
    // (asserted below).
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Only the exact lowercase occurrences match. Ranges are byte offsets:
    // each Cyrillic letter is 2 bytes in UTF-8, so "привет" spans 12 bytes.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive search for the same non-ASCII needle is converted
    // into a Regex query (asserted below) — NOTE(review): presumably because
    // Unicode case folding is delegated to the regex engine; confirm in
    // `SearchQuery::text`.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Now both upper- and lowercase occurrences match in all three files.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The needle's trailing ASCII '.' must match literally, so only two.rs
    // (which ends in "ПРИВЕТ.") qualifies; the range is one byte longer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6464
6465#[gpui::test]
6466async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6467 init_test(cx);
6468
6469 let fs = FakeFs::new(cx.executor());
6470 fs.insert_tree(
6471 "/one/two",
6472 json!({
6473 "three": {
6474 "a.txt": "",
6475 "four": {}
6476 },
6477 "c.rs": ""
6478 }),
6479 )
6480 .await;
6481
6482 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6483 project
6484 .update(cx, |project, cx| {
6485 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6486 project.create_entry((id, rel_path("b..")), true, cx)
6487 })
6488 .await
6489 .unwrap()
6490 .into_included()
6491 .unwrap();
6492
6493 assert_eq!(
6494 fs.paths(true),
6495 vec![
6496 PathBuf::from(path!("/")),
6497 PathBuf::from(path!("/one")),
6498 PathBuf::from(path!("/one/two")),
6499 PathBuf::from(path!("/one/two/c.rs")),
6500 PathBuf::from(path!("/one/two/three")),
6501 PathBuf::from(path!("/one/two/three/a.txt")),
6502 PathBuf::from(path!("/one/two/three/b..")),
6503 PathBuf::from(path!("/one/two/three/four")),
6504 ]
6505 );
6506}
6507
6508#[gpui::test]
6509async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6510 init_test(cx);
6511
6512 let fs = FakeFs::new(cx.executor());
6513 fs.insert_tree(
6514 path!("/dir"),
6515 json!({
6516 "a.tsx": "a",
6517 }),
6518 )
6519 .await;
6520
6521 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6522
6523 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6524 language_registry.add(tsx_lang());
6525 let language_server_names = [
6526 "TypeScriptServer",
6527 "TailwindServer",
6528 "ESLintServer",
6529 "NoHoverCapabilitiesServer",
6530 ];
6531 let mut language_servers = [
6532 language_registry.register_fake_lsp(
6533 "tsx",
6534 FakeLspAdapter {
6535 name: language_server_names[0],
6536 capabilities: lsp::ServerCapabilities {
6537 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6538 ..lsp::ServerCapabilities::default()
6539 },
6540 ..FakeLspAdapter::default()
6541 },
6542 ),
6543 language_registry.register_fake_lsp(
6544 "tsx",
6545 FakeLspAdapter {
6546 name: language_server_names[1],
6547 capabilities: lsp::ServerCapabilities {
6548 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6549 ..lsp::ServerCapabilities::default()
6550 },
6551 ..FakeLspAdapter::default()
6552 },
6553 ),
6554 language_registry.register_fake_lsp(
6555 "tsx",
6556 FakeLspAdapter {
6557 name: language_server_names[2],
6558 capabilities: lsp::ServerCapabilities {
6559 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6560 ..lsp::ServerCapabilities::default()
6561 },
6562 ..FakeLspAdapter::default()
6563 },
6564 ),
6565 language_registry.register_fake_lsp(
6566 "tsx",
6567 FakeLspAdapter {
6568 name: language_server_names[3],
6569 capabilities: lsp::ServerCapabilities {
6570 hover_provider: None,
6571 ..lsp::ServerCapabilities::default()
6572 },
6573 ..FakeLspAdapter::default()
6574 },
6575 ),
6576 ];
6577
6578 let (buffer, _handle) = project
6579 .update(cx, |p, cx| {
6580 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6581 })
6582 .await
6583 .unwrap();
6584 cx.executor().run_until_parked();
6585
6586 let mut servers_with_hover_requests = HashMap::default();
6587 for i in 0..language_server_names.len() {
6588 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6589 panic!(
6590 "Failed to get language server #{i} with name {}",
6591 &language_server_names[i]
6592 )
6593 });
6594 let new_server_name = new_server.server.name();
6595 assert!(
6596 !servers_with_hover_requests.contains_key(&new_server_name),
6597 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6598 );
6599 match new_server_name.as_ref() {
6600 "TailwindServer" | "TypeScriptServer" => {
6601 servers_with_hover_requests.insert(
6602 new_server_name.clone(),
6603 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6604 move |_, _| {
6605 let name = new_server_name.clone();
6606 async move {
6607 Ok(Some(lsp::Hover {
6608 contents: lsp::HoverContents::Scalar(
6609 lsp::MarkedString::String(format!("{name} hover")),
6610 ),
6611 range: None,
6612 }))
6613 }
6614 },
6615 ),
6616 );
6617 }
6618 "ESLintServer" => {
6619 servers_with_hover_requests.insert(
6620 new_server_name,
6621 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6622 |_, _| async move { Ok(None) },
6623 ),
6624 );
6625 }
6626 "NoHoverCapabilitiesServer" => {
6627 let _never_handled = new_server
6628 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6629 panic!(
6630 "Should not call for hovers server with no corresponding capabilities"
6631 )
6632 });
6633 }
6634 unexpected => panic!("Unexpected server name: {unexpected}"),
6635 }
6636 }
6637
6638 let hover_task = project.update(cx, |project, cx| {
6639 project.hover(&buffer, Point::new(0, 0), cx)
6640 });
6641 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6642 |mut hover_request| async move {
6643 hover_request
6644 .next()
6645 .await
6646 .expect("All hover requests should have been triggered")
6647 },
6648 ))
6649 .await;
6650 assert_eq!(
6651 vec!["TailwindServer hover", "TypeScriptServer hover"],
6652 hover_task
6653 .await
6654 .into_iter()
6655 .flatten()
6656 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6657 .sorted()
6658 .collect::<Vec<_>>(),
6659 "Should receive hover responses from all related servers with hover capabilities"
6660 );
6661}
6662
6663#[gpui::test]
6664async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6665 init_test(cx);
6666
6667 let fs = FakeFs::new(cx.executor());
6668 fs.insert_tree(
6669 path!("/dir"),
6670 json!({
6671 "a.ts": "a",
6672 }),
6673 )
6674 .await;
6675
6676 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6677
6678 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6679 language_registry.add(typescript_lang());
6680 let mut fake_language_servers = language_registry.register_fake_lsp(
6681 "TypeScript",
6682 FakeLspAdapter {
6683 capabilities: lsp::ServerCapabilities {
6684 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6685 ..lsp::ServerCapabilities::default()
6686 },
6687 ..FakeLspAdapter::default()
6688 },
6689 );
6690
6691 let (buffer, _handle) = project
6692 .update(cx, |p, cx| {
6693 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6694 })
6695 .await
6696 .unwrap();
6697 cx.executor().run_until_parked();
6698
6699 let fake_server = fake_language_servers
6700 .next()
6701 .await
6702 .expect("failed to get the language server");
6703
6704 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6705 move |_, _| async move {
6706 Ok(Some(lsp::Hover {
6707 contents: lsp::HoverContents::Array(vec![
6708 lsp::MarkedString::String("".to_string()),
6709 lsp::MarkedString::String(" ".to_string()),
6710 lsp::MarkedString::String("\n\n\n".to_string()),
6711 ]),
6712 range: None,
6713 }))
6714 },
6715 );
6716
6717 let hover_task = project.update(cx, |project, cx| {
6718 project.hover(&buffer, Point::new(0, 0), cx)
6719 });
6720 let () = request_handled
6721 .next()
6722 .await
6723 .expect("All hover requests should have been triggered");
6724 assert_eq!(
6725 Vec::<String>::new(),
6726 hover_task
6727 .await
6728 .into_iter()
6729 .flatten()
6730 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6731 .sorted()
6732 .collect::<Vec<_>>(),
6733 "Empty hover parts should be ignored"
6734 );
6735}
6736
6737#[gpui::test]
6738async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6739 init_test(cx);
6740
6741 let fs = FakeFs::new(cx.executor());
6742 fs.insert_tree(
6743 path!("/dir"),
6744 json!({
6745 "a.ts": "a",
6746 }),
6747 )
6748 .await;
6749
6750 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6751
6752 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6753 language_registry.add(typescript_lang());
6754 let mut fake_language_servers = language_registry.register_fake_lsp(
6755 "TypeScript",
6756 FakeLspAdapter {
6757 capabilities: lsp::ServerCapabilities {
6758 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6759 ..lsp::ServerCapabilities::default()
6760 },
6761 ..FakeLspAdapter::default()
6762 },
6763 );
6764
6765 let (buffer, _handle) = project
6766 .update(cx, |p, cx| {
6767 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6768 })
6769 .await
6770 .unwrap();
6771 cx.executor().run_until_parked();
6772
6773 let fake_server = fake_language_servers
6774 .next()
6775 .await
6776 .expect("failed to get the language server");
6777
6778 let mut request_handled = fake_server
6779 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6780 Ok(Some(vec![
6781 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6782 title: "organize imports".to_string(),
6783 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6784 ..lsp::CodeAction::default()
6785 }),
6786 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6787 title: "fix code".to_string(),
6788 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6789 ..lsp::CodeAction::default()
6790 }),
6791 ]))
6792 });
6793
6794 let code_actions_task = project.update(cx, |project, cx| {
6795 project.code_actions(
6796 &buffer,
6797 0..buffer.read(cx).len(),
6798 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6799 cx,
6800 )
6801 });
6802
6803 let () = request_handled
6804 .next()
6805 .await
6806 .expect("The code action request should have been triggered");
6807
6808 let code_actions = code_actions_task.await.unwrap().unwrap();
6809 assert_eq!(code_actions.len(), 1);
6810 assert_eq!(
6811 code_actions[0].lsp_action.action_kind(),
6812 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6813 );
6814}
6815
6816#[gpui::test]
6817async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6818 init_test(cx);
6819
6820 let fs = FakeFs::new(cx.executor());
6821 fs.insert_tree(
6822 path!("/dir"),
6823 json!({
6824 "a.tsx": "a",
6825 }),
6826 )
6827 .await;
6828
6829 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6830
6831 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6832 language_registry.add(tsx_lang());
6833 let language_server_names = [
6834 "TypeScriptServer",
6835 "TailwindServer",
6836 "ESLintServer",
6837 "NoActionsCapabilitiesServer",
6838 ];
6839
6840 let mut language_server_rxs = [
6841 language_registry.register_fake_lsp(
6842 "tsx",
6843 FakeLspAdapter {
6844 name: language_server_names[0],
6845 capabilities: lsp::ServerCapabilities {
6846 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6847 ..lsp::ServerCapabilities::default()
6848 },
6849 ..FakeLspAdapter::default()
6850 },
6851 ),
6852 language_registry.register_fake_lsp(
6853 "tsx",
6854 FakeLspAdapter {
6855 name: language_server_names[1],
6856 capabilities: lsp::ServerCapabilities {
6857 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6858 ..lsp::ServerCapabilities::default()
6859 },
6860 ..FakeLspAdapter::default()
6861 },
6862 ),
6863 language_registry.register_fake_lsp(
6864 "tsx",
6865 FakeLspAdapter {
6866 name: language_server_names[2],
6867 capabilities: lsp::ServerCapabilities {
6868 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6869 ..lsp::ServerCapabilities::default()
6870 },
6871 ..FakeLspAdapter::default()
6872 },
6873 ),
6874 language_registry.register_fake_lsp(
6875 "tsx",
6876 FakeLspAdapter {
6877 name: language_server_names[3],
6878 capabilities: lsp::ServerCapabilities {
6879 code_action_provider: None,
6880 ..lsp::ServerCapabilities::default()
6881 },
6882 ..FakeLspAdapter::default()
6883 },
6884 ),
6885 ];
6886
6887 let (buffer, _handle) = project
6888 .update(cx, |p, cx| {
6889 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6890 })
6891 .await
6892 .unwrap();
6893 cx.executor().run_until_parked();
6894
6895 let mut servers_with_actions_requests = HashMap::default();
6896 for i in 0..language_server_names.len() {
6897 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6898 panic!(
6899 "Failed to get language server #{i} with name {}",
6900 &language_server_names[i]
6901 )
6902 });
6903 let new_server_name = new_server.server.name();
6904
6905 assert!(
6906 !servers_with_actions_requests.contains_key(&new_server_name),
6907 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6908 );
6909 match new_server_name.0.as_ref() {
6910 "TailwindServer" | "TypeScriptServer" => {
6911 servers_with_actions_requests.insert(
6912 new_server_name.clone(),
6913 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6914 move |_, _| {
6915 let name = new_server_name.clone();
6916 async move {
6917 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6918 lsp::CodeAction {
6919 title: format!("{name} code action"),
6920 ..lsp::CodeAction::default()
6921 },
6922 )]))
6923 }
6924 },
6925 ),
6926 );
6927 }
6928 "ESLintServer" => {
6929 servers_with_actions_requests.insert(
6930 new_server_name,
6931 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6932 |_, _| async move { Ok(None) },
6933 ),
6934 );
6935 }
6936 "NoActionsCapabilitiesServer" => {
6937 let _never_handled = new_server
6938 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6939 panic!(
6940 "Should not call for code actions server with no corresponding capabilities"
6941 )
6942 });
6943 }
6944 unexpected => panic!("Unexpected server name: {unexpected}"),
6945 }
6946 }
6947
6948 let code_actions_task = project.update(cx, |project, cx| {
6949 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6950 });
6951
6952 // cx.run_until_parked();
6953 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6954 |mut code_actions_request| async move {
6955 code_actions_request
6956 .next()
6957 .await
6958 .expect("All code actions requests should have been triggered")
6959 },
6960 ))
6961 .await;
6962 assert_eq!(
6963 vec!["TailwindServer code action", "TypeScriptServer code action"],
6964 code_actions_task
6965 .await
6966 .unwrap()
6967 .unwrap()
6968 .into_iter()
6969 .map(|code_action| code_action.lsp_action.title().to_owned())
6970 .sorted()
6971 .collect::<Vec<_>>(),
6972 "Should receive code actions responses from all related servers with hover capabilities"
6973 );
6974}
6975
6976#[gpui::test]
6977async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6978 init_test(cx);
6979
6980 let fs = FakeFs::new(cx.executor());
6981 fs.insert_tree(
6982 "/dir",
6983 json!({
6984 "a.rs": "let a = 1;",
6985 "b.rs": "let b = 2;",
6986 "c.rs": "let c = 2;",
6987 }),
6988 )
6989 .await;
6990
6991 let project = Project::test(
6992 fs,
6993 [
6994 "/dir/a.rs".as_ref(),
6995 "/dir/b.rs".as_ref(),
6996 "/dir/c.rs".as_ref(),
6997 ],
6998 cx,
6999 )
7000 .await;
7001
7002 // check the initial state and get the worktrees
7003 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7004 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7005 assert_eq!(worktrees.len(), 3);
7006
7007 let worktree_a = worktrees[0].read(cx);
7008 let worktree_b = worktrees[1].read(cx);
7009 let worktree_c = worktrees[2].read(cx);
7010
7011 // check they start in the right order
7012 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7013 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7014 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7015
7016 (
7017 worktrees[0].clone(),
7018 worktrees[1].clone(),
7019 worktrees[2].clone(),
7020 )
7021 });
7022
7023 // move first worktree to after the second
7024 // [a, b, c] -> [b, a, c]
7025 project
7026 .update(cx, |project, cx| {
7027 let first = worktree_a.read(cx);
7028 let second = worktree_b.read(cx);
7029 project.move_worktree(first.id(), second.id(), cx)
7030 })
7031 .expect("moving first after second");
7032
7033 // check the state after moving
7034 project.update(cx, |project, cx| {
7035 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7036 assert_eq!(worktrees.len(), 3);
7037
7038 let first = worktrees[0].read(cx);
7039 let second = worktrees[1].read(cx);
7040 let third = worktrees[2].read(cx);
7041
7042 // check they are now in the right order
7043 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7044 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7045 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7046 });
7047
7048 // move the second worktree to before the first
7049 // [b, a, c] -> [a, b, c]
7050 project
7051 .update(cx, |project, cx| {
7052 let second = worktree_a.read(cx);
7053 let first = worktree_b.read(cx);
7054 project.move_worktree(first.id(), second.id(), cx)
7055 })
7056 .expect("moving second before first");
7057
7058 // check the state after moving
7059 project.update(cx, |project, cx| {
7060 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7061 assert_eq!(worktrees.len(), 3);
7062
7063 let first = worktrees[0].read(cx);
7064 let second = worktrees[1].read(cx);
7065 let third = worktrees[2].read(cx);
7066
7067 // check they are now in the right order
7068 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7069 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7070 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7071 });
7072
7073 // move the second worktree to after the third
7074 // [a, b, c] -> [a, c, b]
7075 project
7076 .update(cx, |project, cx| {
7077 let second = worktree_b.read(cx);
7078 let third = worktree_c.read(cx);
7079 project.move_worktree(second.id(), third.id(), cx)
7080 })
7081 .expect("moving second after third");
7082
7083 // check the state after moving
7084 project.update(cx, |project, cx| {
7085 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7086 assert_eq!(worktrees.len(), 3);
7087
7088 let first = worktrees[0].read(cx);
7089 let second = worktrees[1].read(cx);
7090 let third = worktrees[2].read(cx);
7091
7092 // check they are now in the right order
7093 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7094 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7095 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7096 });
7097
7098 // move the third worktree to before the second
7099 // [a, c, b] -> [a, b, c]
7100 project
7101 .update(cx, |project, cx| {
7102 let third = worktree_c.read(cx);
7103 let second = worktree_b.read(cx);
7104 project.move_worktree(third.id(), second.id(), cx)
7105 })
7106 .expect("moving third before second");
7107
7108 // check the state after moving
7109 project.update(cx, |project, cx| {
7110 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7111 assert_eq!(worktrees.len(), 3);
7112
7113 let first = worktrees[0].read(cx);
7114 let second = worktrees[1].read(cx);
7115 let third = worktrees[2].read(cx);
7116
7117 // check they are now in the right order
7118 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7119 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7120 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7121 });
7122
7123 // move the first worktree to after the third
7124 // [a, b, c] -> [b, c, a]
7125 project
7126 .update(cx, |project, cx| {
7127 let first = worktree_a.read(cx);
7128 let third = worktree_c.read(cx);
7129 project.move_worktree(first.id(), third.id(), cx)
7130 })
7131 .expect("moving first after third");
7132
7133 // check the state after moving
7134 project.update(cx, |project, cx| {
7135 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7136 assert_eq!(worktrees.len(), 3);
7137
7138 let first = worktrees[0].read(cx);
7139 let second = worktrees[1].read(cx);
7140 let third = worktrees[2].read(cx);
7141
7142 // check they are now in the right order
7143 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7144 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7145 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7146 });
7147
7148 // move the third worktree to before the first
7149 // [b, c, a] -> [a, b, c]
7150 project
7151 .update(cx, |project, cx| {
7152 let third = worktree_a.read(cx);
7153 let first = worktree_b.read(cx);
7154 project.move_worktree(third.id(), first.id(), cx)
7155 })
7156 .expect("moving third before first");
7157
7158 // check the state after moving
7159 project.update(cx, |project, cx| {
7160 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7161 assert_eq!(worktrees.len(), 3);
7162
7163 let first = worktrees[0].read(cx);
7164 let second = worktrees[1].read(cx);
7165 let third = worktrees[2].read(cx);
7166
7167 // check they are now in the right order
7168 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7169 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7170 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7171 });
7172}
7173
// Verifies that an unstaged diff (working copy vs. Git index) reports the
// correct hunks, and that the hunks are recomputed when the index is rewritten.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of the file as recorded in the Git index ("staged").
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the file in the working copy: adds a comment line and
    // changes the println argument.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // After background work settles, the diff against the index has two
    // hunks: the added comment line and the modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: the comment line is now staged, and the println
    // line no longer exists in the staged copy.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // Once the index change is picked up, only the println remains unstaged,
    // now reported as an addition relative to the new base text.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7265
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// HEAD and the index, including the case of a file that exists in HEAD but has
// been deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain `deletion.rs`, which is absent from the
    // working tree (i.e. an unstaged deletion).
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is staged (index has it, so there is a secondary hunk
    // pointing at the index); the println change is staged too, hence "none".
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; the deletion is not yet staged,
    // so a secondary (index) hunk is still present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the file removed from the index, the deletion hunk no longer has a
    // secondary hunk: it is fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7445
7446#[gpui::test]
7447async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7448 use DiffHunkSecondaryStatus::*;
7449 init_test(cx);
7450
7451 let committed_contents = r#"
7452 zero
7453 one
7454 two
7455 three
7456 four
7457 five
7458 "#
7459 .unindent();
7460 let file_contents = r#"
7461 one
7462 TWO
7463 three
7464 FOUR
7465 five
7466 "#
7467 .unindent();
7468
7469 let fs = FakeFs::new(cx.background_executor.clone());
7470 fs.insert_tree(
7471 "/dir",
7472 json!({
7473 ".git": {},
7474 "file.txt": file_contents.clone()
7475 }),
7476 )
7477 .await;
7478
7479 fs.set_head_and_index_for_repo(
7480 path!("/dir/.git").as_ref(),
7481 &[("file.txt", committed_contents.clone())],
7482 );
7483
7484 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7485
7486 let buffer = project
7487 .update(cx, |project, cx| {
7488 project.open_local_buffer("/dir/file.txt", cx)
7489 })
7490 .await
7491 .unwrap();
7492 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7493 let uncommitted_diff = project
7494 .update(cx, |project, cx| {
7495 project.open_uncommitted_diff(buffer.clone(), cx)
7496 })
7497 .await
7498 .unwrap();
7499 let mut diff_events = cx.events(&uncommitted_diff);
7500
7501 // The hunks are initially unstaged.
7502 uncommitted_diff.read_with(cx, |diff, cx| {
7503 assert_hunks(
7504 diff.hunks(&snapshot, cx),
7505 &snapshot,
7506 &diff.base_text_string().unwrap(),
7507 &[
7508 (
7509 0..0,
7510 "zero\n",
7511 "",
7512 DiffHunkStatus::deleted(HasSecondaryHunk),
7513 ),
7514 (
7515 1..2,
7516 "two\n",
7517 "TWO\n",
7518 DiffHunkStatus::modified(HasSecondaryHunk),
7519 ),
7520 (
7521 3..4,
7522 "four\n",
7523 "FOUR\n",
7524 DiffHunkStatus::modified(HasSecondaryHunk),
7525 ),
7526 ],
7527 );
7528 });
7529
7530 // Stage a hunk. It appears as optimistically staged.
7531 uncommitted_diff.update(cx, |diff, cx| {
7532 let range =
7533 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7534 let hunks = diff
7535 .hunks_intersecting_range(range, &snapshot, cx)
7536 .collect::<Vec<_>>();
7537 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7538
7539 assert_hunks(
7540 diff.hunks(&snapshot, cx),
7541 &snapshot,
7542 &diff.base_text_string().unwrap(),
7543 &[
7544 (
7545 0..0,
7546 "zero\n",
7547 "",
7548 DiffHunkStatus::deleted(HasSecondaryHunk),
7549 ),
7550 (
7551 1..2,
7552 "two\n",
7553 "TWO\n",
7554 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7555 ),
7556 (
7557 3..4,
7558 "four\n",
7559 "FOUR\n",
7560 DiffHunkStatus::modified(HasSecondaryHunk),
7561 ),
7562 ],
7563 );
7564 });
7565
7566 // The diff emits a change event for the range of the staged hunk.
7567 assert!(matches!(
7568 diff_events.next().await.unwrap(),
7569 BufferDiffEvent::HunksStagedOrUnstaged(_)
7570 ));
7571 let event = diff_events.next().await.unwrap();
7572 if let BufferDiffEvent::DiffChanged {
7573 changed_range: Some(changed_range),
7574 } = event
7575 {
7576 let changed_range = changed_range.to_point(&snapshot);
7577 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7578 } else {
7579 panic!("Unexpected event {event:?}");
7580 }
7581
7582 // When the write to the index completes, it appears as staged.
7583 cx.run_until_parked();
7584 uncommitted_diff.update(cx, |diff, cx| {
7585 assert_hunks(
7586 diff.hunks(&snapshot, cx),
7587 &snapshot,
7588 &diff.base_text_string().unwrap(),
7589 &[
7590 (
7591 0..0,
7592 "zero\n",
7593 "",
7594 DiffHunkStatus::deleted(HasSecondaryHunk),
7595 ),
7596 (
7597 1..2,
7598 "two\n",
7599 "TWO\n",
7600 DiffHunkStatus::modified(NoSecondaryHunk),
7601 ),
7602 (
7603 3..4,
7604 "four\n",
7605 "FOUR\n",
7606 DiffHunkStatus::modified(HasSecondaryHunk),
7607 ),
7608 ],
7609 );
7610 });
7611
7612 // The diff emits a change event for the changed index text.
7613 let event = diff_events.next().await.unwrap();
7614 if let BufferDiffEvent::DiffChanged {
7615 changed_range: Some(changed_range),
7616 } = event
7617 {
7618 let changed_range = changed_range.to_point(&snapshot);
7619 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7620 } else {
7621 panic!("Unexpected event {event:?}");
7622 }
7623
7624 // Simulate a problem writing to the git index.
7625 fs.set_error_message_for_index_write(
7626 "/dir/.git".as_ref(),
7627 Some("failed to write git index".into()),
7628 );
7629
7630 // Stage another hunk.
7631 uncommitted_diff.update(cx, |diff, cx| {
7632 let range =
7633 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7634 let hunks = diff
7635 .hunks_intersecting_range(range, &snapshot, cx)
7636 .collect::<Vec<_>>();
7637 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7638
7639 assert_hunks(
7640 diff.hunks(&snapshot, cx),
7641 &snapshot,
7642 &diff.base_text_string().unwrap(),
7643 &[
7644 (
7645 0..0,
7646 "zero\n",
7647 "",
7648 DiffHunkStatus::deleted(HasSecondaryHunk),
7649 ),
7650 (
7651 1..2,
7652 "two\n",
7653 "TWO\n",
7654 DiffHunkStatus::modified(NoSecondaryHunk),
7655 ),
7656 (
7657 3..4,
7658 "four\n",
7659 "FOUR\n",
7660 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7661 ),
7662 ],
7663 );
7664 });
7665 assert!(matches!(
7666 diff_events.next().await.unwrap(),
7667 BufferDiffEvent::HunksStagedOrUnstaged(_)
7668 ));
7669 let event = diff_events.next().await.unwrap();
7670 if let BufferDiffEvent::DiffChanged {
7671 changed_range: Some(changed_range),
7672 } = event
7673 {
7674 let changed_range = changed_range.to_point(&snapshot);
7675 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7676 } else {
7677 panic!("Unexpected event {event:?}");
7678 }
7679
7680 // When the write fails, the hunk returns to being unstaged.
7681 cx.run_until_parked();
7682 uncommitted_diff.update(cx, |diff, cx| {
7683 assert_hunks(
7684 diff.hunks(&snapshot, cx),
7685 &snapshot,
7686 &diff.base_text_string().unwrap(),
7687 &[
7688 (
7689 0..0,
7690 "zero\n",
7691 "",
7692 DiffHunkStatus::deleted(HasSecondaryHunk),
7693 ),
7694 (
7695 1..2,
7696 "two\n",
7697 "TWO\n",
7698 DiffHunkStatus::modified(NoSecondaryHunk),
7699 ),
7700 (
7701 3..4,
7702 "four\n",
7703 "FOUR\n",
7704 DiffHunkStatus::modified(HasSecondaryHunk),
7705 ),
7706 ],
7707 );
7708 });
7709
7710 let event = diff_events.next().await.unwrap();
7711 if let BufferDiffEvent::DiffChanged {
7712 changed_range: Some(changed_range),
7713 } = event
7714 {
7715 let changed_range = changed_range.to_point(&snapshot);
7716 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7717 } else {
7718 panic!("Unexpected event {event:?}");
7719 }
7720
7721 // Allow writing to the git index to succeed again.
7722 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7723
7724 // Stage two hunks with separate operations.
7725 uncommitted_diff.update(cx, |diff, cx| {
7726 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7727 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7728 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7729 });
7730
7731 // Both staged hunks appear as pending.
7732 uncommitted_diff.update(cx, |diff, cx| {
7733 assert_hunks(
7734 diff.hunks(&snapshot, cx),
7735 &snapshot,
7736 &diff.base_text_string().unwrap(),
7737 &[
7738 (
7739 0..0,
7740 "zero\n",
7741 "",
7742 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7743 ),
7744 (
7745 1..2,
7746 "two\n",
7747 "TWO\n",
7748 DiffHunkStatus::modified(NoSecondaryHunk),
7749 ),
7750 (
7751 3..4,
7752 "four\n",
7753 "FOUR\n",
7754 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7755 ),
7756 ],
7757 );
7758 });
7759
7760 // Both staging operations take effect.
7761 cx.run_until_parked();
7762 uncommitted_diff.update(cx, |diff, cx| {
7763 assert_hunks(
7764 diff.hunks(&snapshot, cx),
7765 &snapshot,
7766 &diff.base_text_string().unwrap(),
7767 &[
7768 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7769 (
7770 1..2,
7771 "two\n",
7772 "TWO\n",
7773 DiffHunkStatus::modified(NoSecondaryHunk),
7774 ),
7775 (
7776 3..4,
7777 "four\n",
7778 "FOUR\n",
7779 DiffHunkStatus::modified(NoSecondaryHunk),
7780 ),
7781 ],
7782 );
7783 });
7784}
7785
// Regression test (note the pinned seeds): stages hunks while FakeFs events
// are paused, so that later staging operations race with the delayed
// filesystem notifications for earlier index writes. All hunks must still end
// up staged once every event has been flushed.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both contain this text...
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // ...while the working copy deletes "zero" and modifies "two" and "four",
    // producing three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending immediately, even though the
    // FS event for the index write won't be delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7979
// Randomized test (25 seeded iterations): stage and unstage random hunks with
// random delays between operations, then verify that every hunk's optimistic
// pending status settled into the expected final status. The per-run operation
// count comes from the `OPERATIONS` env var (default 20).
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; the buffer modifies every fifth line, yielding 6 hunks.
    // HEAD and index start out identical.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    // Keep a handle on the fake repo so we can dump the index text at the end.
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly toggle hunks, mirroring the expected pending status into our
    // local `hunks` copy, with random yields between operations.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status must have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8102
8103#[gpui::test]
8104async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8105 init_test(cx);
8106
8107 let committed_contents = r#"
8108 fn main() {
8109 println!("hello from HEAD");
8110 }
8111 "#
8112 .unindent();
8113 let file_contents = r#"
8114 fn main() {
8115 println!("hello from the working copy");
8116 }
8117 "#
8118 .unindent();
8119
8120 let fs = FakeFs::new(cx.background_executor.clone());
8121 fs.insert_tree(
8122 "/dir",
8123 json!({
8124 ".git": {},
8125 "src": {
8126 "main.rs": file_contents,
8127 }
8128 }),
8129 )
8130 .await;
8131
8132 fs.set_head_for_repo(
8133 Path::new("/dir/.git"),
8134 &[("src/main.rs", committed_contents.clone())],
8135 "deadbeef",
8136 );
8137 fs.set_index_for_repo(
8138 Path::new("/dir/.git"),
8139 &[("src/main.rs", committed_contents.clone())],
8140 );
8141
8142 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8143
8144 let buffer = project
8145 .update(cx, |project, cx| {
8146 project.open_local_buffer("/dir/src/main.rs", cx)
8147 })
8148 .await
8149 .unwrap();
8150 let uncommitted_diff = project
8151 .update(cx, |project, cx| {
8152 project.open_uncommitted_diff(buffer.clone(), cx)
8153 })
8154 .await
8155 .unwrap();
8156
8157 cx.run_until_parked();
8158 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8159 let snapshot = buffer.read(cx).snapshot();
8160 assert_hunks(
8161 uncommitted_diff.hunks(&snapshot, cx),
8162 &snapshot,
8163 &uncommitted_diff.base_text_string().unwrap(),
8164 &[(
8165 1..2,
8166 " println!(\"hello from HEAD\");\n",
8167 " println!(\"hello from the working copy\");\n",
8168 DiffHunkStatus {
8169 kind: DiffHunkStatusKind::Modified,
8170 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8171 },
8172 )],
8173 );
8174 });
8175}
8176
8177#[gpui::test]
8178async fn test_repository_and_path_for_project_path(
8179 background_executor: BackgroundExecutor,
8180 cx: &mut gpui::TestAppContext,
8181) {
8182 init_test(cx);
8183 let fs = FakeFs::new(background_executor);
8184 fs.insert_tree(
8185 path!("/root"),
8186 json!({
8187 "c.txt": "",
8188 "dir1": {
8189 ".git": {},
8190 "deps": {
8191 "dep1": {
8192 ".git": {},
8193 "src": {
8194 "a.txt": ""
8195 }
8196 }
8197 },
8198 "src": {
8199 "b.txt": ""
8200 }
8201 },
8202 }),
8203 )
8204 .await;
8205
8206 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8207 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8208 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8209 project
8210 .update(cx, |project, cx| project.git_scans_complete(cx))
8211 .await;
8212 cx.run_until_parked();
8213
8214 project.read_with(cx, |project, cx| {
8215 let git_store = project.git_store().read(cx);
8216 let pairs = [
8217 ("c.txt", None),
8218 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8219 (
8220 "dir1/deps/dep1/src/a.txt",
8221 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8222 ),
8223 ];
8224 let expected = pairs
8225 .iter()
8226 .map(|(path, result)| {
8227 (
8228 path,
8229 result.map(|(repo, repo_path)| {
8230 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8231 }),
8232 )
8233 })
8234 .collect::<Vec<_>>();
8235 let actual = pairs
8236 .iter()
8237 .map(|(path, _)| {
8238 let project_path = (tree_id, rel_path(path)).into();
8239 let result = maybe!({
8240 let (repo, repo_path) =
8241 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8242 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8243 });
8244 (path, result)
8245 })
8246 .collect::<Vec<_>>();
8247 pretty_assertions::assert_eq!(expected, actual);
8248 });
8249
8250 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8251 .await
8252 .unwrap();
8253 cx.run_until_parked();
8254
8255 project.read_with(cx, |project, cx| {
8256 let git_store = project.git_store().read(cx);
8257 assert_eq!(
8258 git_store.repository_and_path_for_project_path(
8259 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8260 cx
8261 ),
8262 None
8263 );
8264 });
8265}
8266
8267#[gpui::test]
8268async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8269 init_test(cx);
8270 let fs = FakeFs::new(cx.background_executor.clone());
8271 let home = paths::home_dir();
8272 fs.insert_tree(
8273 home,
8274 json!({
8275 ".git": {},
8276 "project": {
8277 "a.txt": "A"
8278 },
8279 }),
8280 )
8281 .await;
8282
8283 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8284 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8285 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8286
8287 project
8288 .update(cx, |project, cx| project.git_scans_complete(cx))
8289 .await;
8290 tree.flush_fs_events(cx).await;
8291
8292 project.read_with(cx, |project, cx| {
8293 let containing = project
8294 .git_store()
8295 .read(cx)
8296 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8297 assert!(containing.is_none());
8298 });
8299
8300 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8301 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8302 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8303 project
8304 .update(cx, |project, cx| project.git_scans_complete(cx))
8305 .await;
8306 tree.flush_fs_events(cx).await;
8307
8308 project.read_with(cx, |project, cx| {
8309 let containing = project
8310 .git_store()
8311 .read(cx)
8312 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8313 assert_eq!(
8314 containing
8315 .unwrap()
8316 .0
8317 .read(cx)
8318 .work_directory_abs_path
8319 .as_ref(),
8320 home,
8321 );
8322 });
8323}
8324
// Verifies that git status for a real on-disk repository is computed correctly
// on startup and kept up to date across working-copy edits, commits, and file
// deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git operations below require parking.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // After the commit: delete d.txt and modify a.txt in the working copy.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modified files and the deletion so their statuses clear.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Remove one tracked (a.txt) and one untracked (b.txt) file from disk.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8454
8455#[gpui::test]
8456async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8457 init_test(cx);
8458 cx.executor().allow_parking();
8459
8460 let root = TempTree::new(json!({
8461 "project": {
8462 "sub": {},
8463 "a.txt": "",
8464 },
8465 }));
8466
8467 let work_dir = root.path().join("project");
8468 let repo = git_init(work_dir.as_path());
8469 // a.txt exists in HEAD and the working copy but is deleted in the index.
8470 git_add("a.txt", &repo);
8471 git_commit("Initial commit", &repo);
8472 git_remove_index("a.txt".as_ref(), &repo);
8473 // `sub` is a nested git repository.
8474 let _sub = git_init(&work_dir.join("sub"));
8475
8476 let project = Project::test(
8477 Arc::new(RealFs::new(None, cx.executor())),
8478 [root.path()],
8479 cx,
8480 )
8481 .await;
8482
8483 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8484 tree.flush_fs_events(cx).await;
8485 project
8486 .update(cx, |project, cx| project.git_scans_complete(cx))
8487 .await;
8488 cx.executor().run_until_parked();
8489
8490 let repository = project.read_with(cx, |project, cx| {
8491 project
8492 .repositories(cx)
8493 .values()
8494 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8495 .unwrap()
8496 .clone()
8497 });
8498
8499 repository.read_with(cx, |repository, _cx| {
8500 let entries = repository.cached_status().collect::<Vec<_>>();
8501
8502 // `sub` doesn't appear in our computed statuses.
8503 // a.txt appears with a combined `DA` status.
8504 assert_eq!(
8505 entries,
8506 [StatusEntry {
8507 repo_path: repo_path("a.txt"),
8508 status: TrackedStatus {
8509 index_status: StatusCode::Deleted,
8510 worktree_status: StatusCode::Added
8511 }
8512 .into(),
8513 }]
8514 )
8515 });
8516}
8517
8518#[track_caller]
8519/// We merge lhs into rhs.
8520fn merge_pending_ops_snapshots(
8521 source: Vec<pending_op::PendingOps>,
8522 mut target: Vec<pending_op::PendingOps>,
8523) -> Vec<pending_op::PendingOps> {
8524 for s_ops in source {
8525 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8526 if ops.repo_path == s_ops.repo_path {
8527 Some(idx)
8528 } else {
8529 None
8530 }
8531 }) {
8532 let t_ops = &mut target[idx];
8533 for s_op in s_ops.ops {
8534 if let Some(op_idx) = t_ops
8535 .ops
8536 .iter()
8537 .zip(0..)
8538 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8539 {
8540 let t_op = &mut t_ops.ops[op_idx];
8541 match (s_op.job_status, t_op.job_status) {
8542 (pending_op::JobStatus::Running, _) => {}
8543 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8544 (s_st, t_st) if s_st == t_st => {}
8545 _ => unreachable!(),
8546 }
8547 } else {
8548 t_ops.ops.push(s_op);
8549 }
8550 }
8551 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8552 } else {
8553 target.push(s_ops);
8554 }
8555 }
8556 target
8557}
8558
// Exercises pending-op bookkeeping for staging/unstaging a single file: each
// request must surface as a `Running` op while in flight and a `Finished` op
// once the git job completes, and the event stream must record every op.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every PendingOpsChanged event into one merged snapshot so the full
    // op history can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops_by_path.is_empty());
    });

    // Op ids are expected to be assigned sequentially starting at 1.
    let mut id = 1u16;

    // Issues one stage/unstage request, asserting the newest op is `Running`
    // while the task is in flight and `Finished` after it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // While the task is in flight the op must be visible as Running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After the task resolves the same op must be marked Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging; each request gets a fresh op id.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream should record all five ops, in order, Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last request staged, so the file ends up added in the index and
    // unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8719
// When two stage requests for the same path are issued back to back, the
// earlier one should be superseded (recorded as `Skipped`) and only the later
// one should run to completion (`Finished`).
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every PendingOpsChanged event into one merged snapshot so the full
    // op history can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First request: detached, never awaited — it should end up Skipped.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second request for the same path: awaited with a timeout; it supersedes
    // the first and should finish normally.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Either way, the file ends up staged.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8825
// Exercises stage_all/unstage_all: bulk operations should record per-path
// pending ops for every affected file, and unstage_all should restore both
// files to their original untracked state.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every PendingOpsChanged event into one merged snapshot so the full
    // op history can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 from the individual stage (stage_all had nothing to do for
    // it), op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 from stage_all, op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
8954
8955#[gpui::test]
8956async fn test_repository_subfolder_git_status(
8957 executor: gpui::BackgroundExecutor,
8958 cx: &mut gpui::TestAppContext,
8959) {
8960 init_test(cx);
8961
8962 let fs = FakeFs::new(executor);
8963 fs.insert_tree(
8964 path!("/root"),
8965 json!({
8966 "my-repo": {
8967 ".git": {},
8968 "a.txt": "a",
8969 "sub-folder-1": {
8970 "sub-folder-2": {
8971 "c.txt": "cc",
8972 "d": {
8973 "e.txt": "eee"
8974 }
8975 },
8976 }
8977 },
8978 }),
8979 )
8980 .await;
8981
8982 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8983 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8984
8985 fs.set_status_for_repo(
8986 path!("/root/my-repo/.git").as_ref(),
8987 &[(E_TXT, FileStatus::Untracked)],
8988 );
8989
8990 let project = Project::test(
8991 fs.clone(),
8992 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8993 cx,
8994 )
8995 .await;
8996
8997 project
8998 .update(cx, |project, cx| project.git_scans_complete(cx))
8999 .await;
9000 cx.run_until_parked();
9001
9002 let repository = project.read_with(cx, |project, cx| {
9003 project.repositories(cx).values().next().unwrap().clone()
9004 });
9005
9006 // Ensure that the git status is loaded correctly
9007 repository.read_with(cx, |repository, _cx| {
9008 assert_eq!(
9009 repository.work_directory_abs_path,
9010 Path::new(path!("/root/my-repo")).into()
9011 );
9012
9013 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9014 assert_eq!(
9015 repository
9016 .status_for_path(&repo_path(E_TXT))
9017 .unwrap()
9018 .status,
9019 FileStatus::Untracked
9020 );
9021 });
9022
9023 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
9024 project
9025 .update(cx, |project, cx| project.git_scans_complete(cx))
9026 .await;
9027 cx.run_until_parked();
9028
9029 repository.read_with(cx, |repository, _cx| {
9030 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9031 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
9032 });
9033}
9034
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (an always-false cfg) until the flakiness is fixed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on a branch, then cherry-pick it onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The cherry-pick must have left the repo in a conflicted state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflict should be surfaced through the repository's merge_conflicts.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick should clear the tracked conflicts.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9117
// Verifies that rewriting .gitignore updates both the worktree's ignored flags
// and the repository statuses for previously-ignored files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both track .gitignore and a.xml; b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt is reported as newly added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9185
9186// NOTE:
9187// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
9188// a directory which some program has already open.
// This is a limitation of Windows.
9190// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9191// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that renaming a repository's work directory on disk updates the
// tracked `work_directory_abs_path` while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check the initial work directory and statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The new path should be tracked and the statuses carried over.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9267
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program already has open.
// This is a limitation of Windows itself. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end git status tracking against a real on-disk repository: initial
// scan, working-copy modification, commits, reset/stash, .gitignore edits,
// deletions, and directory renames, checking the observed statuses each step.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    // Wait for FS events and the initial git scan to settle before asserting.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added to the index, so both start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files report no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file and a whole directory, and ignore f.txt going forward.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a nested directory with a new file; it should show up untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the outer directory; the file's status should follow the new path.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9492
9493#[gpui::test]
9494#[ignore]
9495async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
9496 init_test(cx);
9497 cx.executor().allow_parking();
9498
9499 const IGNORE_RULE: &str = "**/target";
9500
9501 let root = TempTree::new(json!({
9502 "project": {
9503 "src": {
9504 "main.rs": "fn main() {}"
9505 },
9506 "target": {
9507 "debug": {
9508 "important_text.txt": "important text",
9509 },
9510 },
9511 ".gitignore": IGNORE_RULE
9512 },
9513
9514 }));
9515 let root_path = root.path();
9516
9517 // Set up git repository before creating the worktree.
9518 let work_dir = root.path().join("project");
9519 let repo = git_init(work_dir.as_path());
9520 repo.add_ignore_rule(IGNORE_RULE).unwrap();
9521 git_add("src/main.rs", &repo);
9522 git_add(".gitignore", &repo);
9523 git_commit("Initial commit", &repo);
9524
9525 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
9526 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9527 let project_events = Arc::new(Mutex::new(Vec::new()));
9528 project.update(cx, |project, cx| {
9529 let repo_events = repository_updates.clone();
9530 cx.subscribe(project.git_store(), move |_, _, e, _| {
9531 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9532 repo_events.lock().push(e.clone());
9533 }
9534 })
9535 .detach();
9536 let project_events = project_events.clone();
9537 cx.subscribe_self(move |_, e, _| {
9538 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9539 project_events.lock().extend(
9540 updates
9541 .iter()
9542 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9543 .filter(|(path, _)| path != "fs-event-sentinel"),
9544 );
9545 }
9546 })
9547 .detach();
9548 });
9549
9550 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9551 tree.flush_fs_events(cx).await;
9552 tree.update(cx, |tree, cx| {
9553 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
9554 })
9555 .await
9556 .unwrap();
9557 tree.update(cx, |tree, _| {
9558 assert_eq!(
9559 tree.entries(true, 0)
9560 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9561 .collect::<Vec<_>>(),
9562 vec![
9563 (rel_path(""), false),
9564 (rel_path("project/"), false),
9565 (rel_path("project/.gitignore"), false),
9566 (rel_path("project/src"), false),
9567 (rel_path("project/src/main.rs"), false),
9568 (rel_path("project/target"), true),
9569 (rel_path("project/target/debug"), true),
9570 (rel_path("project/target/debug/important_text.txt"), true),
9571 ]
9572 );
9573 });
9574
9575 assert_eq!(
9576 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9577 vec![
9578 RepositoryEvent::StatusesChanged,
9579 RepositoryEvent::MergeHeadsChanged,
9580 ],
9581 "Initial worktree scan should produce a repo update event"
9582 );
9583 assert_eq!(
9584 project_events.lock().drain(..).collect::<Vec<_>>(),
9585 vec![
9586 ("project/target".to_string(), PathChange::Loaded),
9587 ("project/target/debug".to_string(), PathChange::Loaded),
9588 (
9589 "project/target/debug/important_text.txt".to_string(),
9590 PathChange::Loaded
9591 ),
9592 ],
9593 "Initial project changes should show that all not-ignored and all opened files are loaded"
9594 );
9595
9596 let deps_dir = work_dir.join("target").join("debug").join("deps");
9597 std::fs::create_dir_all(&deps_dir).unwrap();
9598 tree.flush_fs_events(cx).await;
9599 project
9600 .update(cx, |project, cx| project.git_scans_complete(cx))
9601 .await;
9602 cx.executor().run_until_parked();
9603 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9604 tree.flush_fs_events(cx).await;
9605 project
9606 .update(cx, |project, cx| project.git_scans_complete(cx))
9607 .await;
9608 cx.executor().run_until_parked();
9609 std::fs::remove_dir_all(&deps_dir).unwrap();
9610 tree.flush_fs_events(cx).await;
9611 project
9612 .update(cx, |project, cx| project.git_scans_complete(cx))
9613 .await;
9614 cx.executor().run_until_parked();
9615
9616 tree.update(cx, |tree, _| {
9617 assert_eq!(
9618 tree.entries(true, 0)
9619 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9620 .collect::<Vec<_>>(),
9621 vec![
9622 (rel_path(""), false),
9623 (rel_path("project/"), false),
9624 (rel_path("project/.gitignore"), false),
9625 (rel_path("project/src"), false),
9626 (rel_path("project/src/main.rs"), false),
9627 (rel_path("project/target"), true),
9628 (rel_path("project/target/debug"), true),
9629 (rel_path("project/target/debug/important_text.txt"), true),
9630 ],
9631 "No stray temp files should be left after the flycheck changes"
9632 );
9633 });
9634
9635 assert_eq!(
9636 repository_updates
9637 .lock()
9638 .iter()
9639 .cloned()
9640 .collect::<Vec<_>>(),
9641 Vec::new(),
9642 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9643 );
9644 assert_eq!(
9645 project_events.lock().as_slice(),
9646 vec![
9647 ("project/target/debug/deps".to_string(), PathChange::Added),
9648 ("project/target/debug/deps".to_string(), PathChange::Removed),
9649 ],
9650 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9651 No updates for more nested directories should happen as those are ignored",
9652 );
9653}
9654
9655#[gpui::test]
9656async fn test_odd_events_for_ignored_dirs(
9657 executor: BackgroundExecutor,
9658 cx: &mut gpui::TestAppContext,
9659) {
9660 init_test(cx);
9661 let fs = FakeFs::new(executor);
9662 fs.insert_tree(
9663 path!("/root"),
9664 json!({
9665 ".git": {},
9666 ".gitignore": "**/target/",
9667 "src": {
9668 "main.rs": "fn main() {}",
9669 },
9670 "target": {
9671 "debug": {
9672 "foo.txt": "foo",
9673 "deps": {}
9674 }
9675 }
9676 }),
9677 )
9678 .await;
9679 fs.set_head_and_index_for_repo(
9680 path!("/root/.git").as_ref(),
9681 &[
9682 (".gitignore", "**/target/".into()),
9683 ("src/main.rs", "fn main() {}".into()),
9684 ],
9685 );
9686
9687 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9688 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9689 let project_events = Arc::new(Mutex::new(Vec::new()));
9690 project.update(cx, |project, cx| {
9691 let repository_updates = repository_updates.clone();
9692 cx.subscribe(project.git_store(), move |_, _, e, _| {
9693 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9694 repository_updates.lock().push(e.clone());
9695 }
9696 })
9697 .detach();
9698 let project_events = project_events.clone();
9699 cx.subscribe_self(move |_, e, _| {
9700 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9701 project_events.lock().extend(
9702 updates
9703 .iter()
9704 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9705 .filter(|(path, _)| path != "fs-event-sentinel"),
9706 );
9707 }
9708 })
9709 .detach();
9710 });
9711
9712 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9713 tree.update(cx, |tree, cx| {
9714 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9715 })
9716 .await
9717 .unwrap();
9718 tree.flush_fs_events(cx).await;
9719 project
9720 .update(cx, |project, cx| project.git_scans_complete(cx))
9721 .await;
9722 cx.run_until_parked();
9723 tree.update(cx, |tree, _| {
9724 assert_eq!(
9725 tree.entries(true, 0)
9726 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9727 .collect::<Vec<_>>(),
9728 vec![
9729 (rel_path(""), false),
9730 (rel_path(".gitignore"), false),
9731 (rel_path("src"), false),
9732 (rel_path("src/main.rs"), false),
9733 (rel_path("target"), true),
9734 (rel_path("target/debug"), true),
9735 (rel_path("target/debug/deps"), true),
9736 (rel_path("target/debug/foo.txt"), true),
9737 ]
9738 );
9739 });
9740
9741 assert_eq!(
9742 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9743 vec![
9744 RepositoryEvent::MergeHeadsChanged,
9745 RepositoryEvent::BranchChanged,
9746 RepositoryEvent::StatusesChanged,
9747 RepositoryEvent::StatusesChanged,
9748 ],
9749 "Initial worktree scan should produce a repo update event"
9750 );
9751 assert_eq!(
9752 project_events.lock().drain(..).collect::<Vec<_>>(),
9753 vec![
9754 ("target".to_string(), PathChange::Loaded),
9755 ("target/debug".to_string(), PathChange::Loaded),
9756 ("target/debug/deps".to_string(), PathChange::Loaded),
9757 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9758 ],
9759 "All non-ignored entries and all opened firs should be getting a project event",
9760 );
9761
9762 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9763 // This may happen multiple times during a single flycheck, but once is enough for testing.
9764 fs.emit_fs_event("/root/target/debug/deps", None);
9765 tree.flush_fs_events(cx).await;
9766 project
9767 .update(cx, |project, cx| project.git_scans_complete(cx))
9768 .await;
9769 cx.executor().run_until_parked();
9770
9771 assert_eq!(
9772 repository_updates
9773 .lock()
9774 .iter()
9775 .cloned()
9776 .collect::<Vec<_>>(),
9777 Vec::new(),
9778 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9779 );
9780 assert_eq!(
9781 project_events.lock().as_slice(),
9782 Vec::new(),
9783 "No further project events should happen, as only ignored dirs received FS events",
9784 );
9785}
9786
// Verifies that repositories are only discovered for visible worktrees:
// adding an invisible (single-file) worktree that lives inside an enclosing
// repository must not surface that repository.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo rooted at the visible worktree is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `b.txt` as an invisible worktree; it sits inside /root/dir1's repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: /root/dir1/.git stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9848
// Verifies git status and ignore flags after a rescan: tracked files, files
// ignored by an ancestor .gitignore outside the repo, and files inside an
// ignored directory, both on initial scan and after creating new files.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so .git and ignored entries stay visible.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's contents to be scanned.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: no statuses; only ignored-dir contents carry is_ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file (and stage it), a new ancestor-ignored file,
    // and a new file inside the ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // Only the staged file reports a status; ignored files report none.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9989
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/...`) and submodules (`.git` file pointing at
// `.git/modules/...`) are each discovered as their own repository, and that
// git-state changes in them are picked up and reflected in buffer statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories are discovered: the main one, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both say "b", while the file on disk says "B",
            // so the file should read as modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10145
10146#[gpui::test]
10147async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
10148 init_test(cx);
10149 let fs = FakeFs::new(cx.background_executor.clone());
10150 fs.insert_tree(
10151 path!("/root"),
10152 json!({
10153 "project": {
10154 ".git": {},
10155 "child1": {
10156 "a.txt": "A",
10157 },
10158 "child2": {
10159 "b.txt": "B",
10160 }
10161 }
10162 }),
10163 )
10164 .await;
10165
10166 let project = Project::test(
10167 fs.clone(),
10168 [
10169 path!("/root/project/child1").as_ref(),
10170 path!("/root/project/child2").as_ref(),
10171 ],
10172 cx,
10173 )
10174 .await;
10175
10176 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10177 tree.flush_fs_events(cx).await;
10178 project
10179 .update(cx, |project, cx| project.git_scans_complete(cx))
10180 .await;
10181 cx.executor().run_until_parked();
10182
10183 let repos = project.read_with(cx, |project, cx| {
10184 project
10185 .repositories(cx)
10186 .values()
10187 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10188 .collect::<Vec<_>>()
10189 });
10190 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
10191}
10192
// Verifies that saving a buffer under a new path (save-as) re-bases its open
// diffs: the unstaged diff should switch to the new file's staged content and
// the uncommitted diff to the new file's committed content, driven by the
// `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each of HEAD/index per file, so we can tell
    // exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the diff has something to show.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, _cx| {
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly-opened uncommitted diff should already use the new path too.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
        let base_text = uncommitted_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10306
10307async fn search(
10308 project: &Entity<Project>,
10309 query: SearchQuery,
10310 cx: &mut gpui::TestAppContext,
10311) -> Result<HashMap<String, Vec<Range<usize>>>> {
10312 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10313 let mut results = HashMap::default();
10314 while let Ok(search_result) = search_rx.recv().await {
10315 match search_result {
10316 SearchResult::Buffer { buffer, ranges } => {
10317 results.entry(buffer).or_insert(ranges);
10318 }
10319 SearchResult::LimitReached => {}
10320 }
10321 }
10322 Ok(results
10323 .into_iter()
10324 .map(|(buffer, ranges)| {
10325 buffer.update(cx, |buffer, cx| {
10326 let path = buffer
10327 .file()
10328 .unwrap()
10329 .full_path(cx)
10330 .to_string_lossy()
10331 .to_string();
10332 let ranges = ranges
10333 .into_iter()
10334 .map(|range| range.to_offset(buffer))
10335 .collect::<Vec<_>>();
10336 (path, ranges)
10337 })
10338 })
10339 .collect())
10340}
10341
10342pub fn init_test(cx: &mut gpui::TestAppContext) {
10343 zlog::init_test();
10344
10345 cx.update(|cx| {
10346 let settings_store = SettingsStore::test(cx);
10347 cx.set_global(settings_store);
10348 release_channel::init(SemanticVersion::default(), cx);
10349 });
10350}
10351
10352fn json_lang() -> Arc<Language> {
10353 Arc::new(Language::new(
10354 LanguageConfig {
10355 name: "JSON".into(),
10356 matcher: LanguageMatcher {
10357 path_suffixes: vec!["json".to_string()],
10358 ..Default::default()
10359 },
10360 ..Default::default()
10361 },
10362 None,
10363 ))
10364}
10365
10366fn js_lang() -> Arc<Language> {
10367 Arc::new(Language::new(
10368 LanguageConfig {
10369 name: "JavaScript".into(),
10370 matcher: LanguageMatcher {
10371 path_suffixes: vec!["js".to_string()],
10372 ..Default::default()
10373 },
10374 ..Default::default()
10375 },
10376 None,
10377 ))
10378}
10379
10380fn rust_lang() -> Arc<Language> {
10381 Arc::new(Language::new(
10382 LanguageConfig {
10383 name: "Rust".into(),
10384 matcher: LanguageMatcher {
10385 path_suffixes: vec!["rs".to_string()],
10386 ..Default::default()
10387 },
10388 ..Default::default()
10389 },
10390 Some(tree_sitter_rust::LANGUAGE.into()),
10391 ))
10392}
10393
/// Builds a grammarless "Python" language for tests, wired to a fake
/// toolchain lister that discovers `.venv` directories on the given fake
/// filesystem and to a `pyproject.toml` manifest.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister: reports one venv toolchain per `.venv` directory
    // found while walking up from the queried subroot path.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Probe `<worktree_root>/<ancestor>/.venv` on the fake fs.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchain needs no shell activation commands.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10463
10464fn typescript_lang() -> Arc<Language> {
10465 Arc::new(Language::new(
10466 LanguageConfig {
10467 name: "TypeScript".into(),
10468 matcher: LanguageMatcher {
10469 path_suffixes: vec!["ts".to_string()],
10470 ..Default::default()
10471 },
10472 ..Default::default()
10473 },
10474 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10475 ))
10476}
10477
10478fn tsx_lang() -> Arc<Language> {
10479 Arc::new(Language::new(
10480 LanguageConfig {
10481 name: "tsx".into(),
10482 matcher: LanguageMatcher {
10483 path_suffixes: vec!["tsx".to_string()],
10484 ..Default::default()
10485 },
10486 ..Default::default()
10487 },
10488 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10489 ))
10490}
10491
10492fn get_all_tasks(
10493 project: &Entity<Project>,
10494 task_contexts: Arc<TaskContexts>,
10495 cx: &mut App,
10496) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10497 let new_tasks = project.update(cx, |project, cx| {
10498 project.task_store.update(cx, |task_store, cx| {
10499 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10500 this.used_and_current_resolved_tasks(task_contexts, cx)
10501 })
10502 })
10503 });
10504
10505 cx.background_spawn(async move {
10506 let (mut old, new) = new_tasks.await;
10507 old.extend(new);
10508 old
10509 })
10510}
10511
10512#[track_caller]
10513fn assert_entry_git_state(
10514 tree: &Worktree,
10515 repository: &Repository,
10516 path: &str,
10517 index_status: Option<StatusCode>,
10518 is_ignored: bool,
10519) {
10520 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10521 let entry = tree
10522 .entry_for_path(&rel_path(path))
10523 .unwrap_or_else(|| panic!("entry {path} not found"));
10524 let status = repository
10525 .status_for_path(&repo_path(path))
10526 .map(|entry| entry.status);
10527 let expected = index_status.map(|index_status| {
10528 TrackedStatus {
10529 index_status,
10530 worktree_status: StatusCode::Unmodified,
10531 }
10532 .into()
10533 });
10534 assert_eq!(
10535 status, expected,
10536 "expected {path} to have git status: {expected:?}"
10537 );
10538 assert_eq!(
10539 entry.is_ignored, is_ignored,
10540 "expected {path} to have is_ignored: {is_ignored}"
10541 );
10542}
10543
10544#[track_caller]
10545fn git_init(path: &Path) -> git2::Repository {
10546 let mut init_opts = RepositoryInitOptions::new();
10547 init_opts.initial_head("main");
10548 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10549}
10550
10551#[track_caller]
10552fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10553 let path = path.as_ref();
10554 let mut index = repo.index().expect("Failed to get index");
10555 index.add_path(path).expect("Failed to add file");
10556 index.write().expect("Failed to write index");
10557}
10558
10559#[track_caller]
10560fn git_remove_index(path: &Path, repo: &git2::Repository) {
10561 let mut index = repo.index().expect("Failed to get index");
10562 index.remove_path(path).expect("Failed to add file");
10563 index.write().expect("Failed to write index");
10564}
10565
10566#[track_caller]
10567fn git_commit(msg: &'static str, repo: &git2::Repository) {
10568 use git2::Signature;
10569
10570 let signature = Signature::now("test", "test@zed.dev").unwrap();
10571 let oid = repo.index().unwrap().write_tree().unwrap();
10572 let tree = repo.find_tree(oid).unwrap();
10573 if let Ok(head) = repo.head() {
10574 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10575
10576 let parent_commit = parent_obj.as_commit().unwrap();
10577
10578 repo.commit(
10579 Some("HEAD"),
10580 &signature,
10581 &signature,
10582 msg,
10583 &tree,
10584 &[parent_commit],
10585 )
10586 .expect("Failed to commit with parent");
10587 } else {
10588 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10589 .expect("Failed to commit");
10590 }
10591}
10592
/// Cherry-picks `commit` onto the current HEAD with default options.
/// NOTE: `#[cfg(any())]` compiles to "never" — this helper is currently
/// disabled but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10598
10599#[track_caller]
10600fn git_stash(repo: &mut git2::Repository) {
10601 use git2::Signature;
10602
10603 let signature = Signature::now("test", "test@zed.dev").unwrap();
10604 repo.stash_save(&signature, "N/A", None)
10605 .expect("Failed to stash");
10606}
10607
10608#[track_caller]
10609fn git_reset(offset: usize, repo: &git2::Repository) {
10610 let head = repo.head().expect("Couldn't get repo head");
10611 let object = head.peel(git2::ObjectType::Commit).unwrap();
10612 let commit = object.as_commit().unwrap();
10613 let new_head = commit
10614 .parents()
10615 .inspect(|parnet| {
10616 parnet.message();
10617 })
10618 .nth(offset)
10619 .expect("Not enough history");
10620 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10621 .expect("Could not reset");
10622}
10623
/// Creates branch `name` pointing at the current HEAD commit, without
/// switching to it.
/// NOTE: `#[cfg(any())]` compiles to "never" — this helper is currently
/// disabled but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Panic message fixed: it previously said "Failed to commit"
    // (copy-paste), which was misleading for a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10634
/// Points HEAD at `name` (a refname, e.g. "refs/heads/main") and checks out
/// the resulting tree into the working directory.
/// NOTE: `#[cfg(any())]` compiles to "never" — this helper is currently
/// disabled but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10641
/// Snapshots the repository's status as a map from path to `git2::Status`.
/// NOTE: `#[cfg(any())]` compiles to "never" — this helper is currently
/// disabled but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
10651
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling worktrees under a common root, so we can verify that
    // absolute paths are resolved to the correct worktree.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions
    // below; worktree order matches the order passed to `Project::test`.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file: the returned path is relative to the worktree root.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10735
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two git repositories: `/root/a` and `/root/b`. `/root/b/script` is a
    // third worktree nested inside repo `b` but containing no `.git` itself.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index the worktrees by absolute path so they can be removed by path
    // below, independent of iteration order.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories exist (`a` and `b`); `b/script` has no `.git`.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `b/script` worktree must not drop repo `b`,
    // because the `/root/b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that owns the active repository should make the
    // remaining repository (`b`) active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every worktree gone, there is no active repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}