1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97#[cfg(not(windows))]
98#[gpui::test]
99async fn test_symlinks(cx: &mut gpui::TestAppContext) {
100 init_test(cx);
101 cx.executor().allow_parking();
102
103 let dir = TempTree::new(json!({
104 "root": {
105 "apple": "",
106 "banana": {
107 "carrot": {
108 "date": "",
109 "endive": "",
110 }
111 },
112 "fennel": {
113 "grape": "",
114 }
115 }
116 }));
117
118 let root_link_path = dir.path().join("root_link");
119 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
120 os::unix::fs::symlink(
121 dir.path().join("root/fennel"),
122 dir.path().join("root/finnochio"),
123 )
124 .unwrap();
125
126 let project = Project::test(
127 Arc::new(RealFs::new(None, cx.executor())),
128 [root_link_path.as_ref()],
129 cx,
130 )
131 .await;
132
133 project.update(cx, |project, cx| {
134 let tree = project.worktrees(cx).next().unwrap().read(cx);
135 assert_eq!(tree.file_count(), 5);
136 assert_eq!(
137 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
138 tree.entry_for_path(rel_path("finnochio/grape"))
139 .unwrap()
140 .inode
141 );
142 });
143}
144
// Verifies .editorconfig support end-to-end:
// - .editorconfig values override .zed/settings.json,
// - a nested .editorconfig overrides its parent,
// - "tab_width" applies when "indent_size" is absent,
// - a value of "off" disables the editorconfig key so Zed's settings win.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root .editorconfig, Zed project settings, and a nested
    // .editorconfig in "b/" that partially overrides the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the test executor stays deterministic.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // blocking on language detection for the file.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's "*.rs" glob
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
243
// Verifies that custom git hosting providers declared in project settings are
// registered with the global GitHostingProviderRegistry, and unregistered
// again once the setting is removed from disk.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings are loaded, the custom provider should be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk...
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // ...and the custom provider should be removed again.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
308
// Verifies project-specific settings and tasks in a single worktree:
// - per-directory .zed/settings.json values apply to files beneath them,
// - tasks from nested .zed/tasks.json files are discovered alongside the
//   root ones,
// - after a task is scheduled it is promoted in the ordering, and tasks
//   from the global tasks.json file are listed after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution below requires an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the worktree-root .zed/tasks.json entries.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings come from the nearest enclosing .zed/settings.json:
            // the root one for a/a.rs, the nested one for b/b.rs.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files were discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled, and add an entry to the
    // global tasks.json file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the new global task is
    // listed after all worktree tasks, with its env variable resolved.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
509
// Verifies that a task referencing $ZED_WORKTREE_ROOT resolves only when a
// worktree context supplies that variable: with no active worktree context
// the task list is empty; with one, the variable is substituted.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // An active item alone (no worktree context) provides no
    // ZED_WORKTREE_ROOT value, so the task cannot be resolved.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context defining WorktreeRoot, the variable is
    // substituted into the resolved command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
601
// Verifies that one language server adapter can serve multiple subproject
// roots within a single worktree: both Python subprojects initially share
// the same server instance (each rooted at its pyproject.toml directory),
// and activating a different toolchain for one subproject causes a separate
// server instance to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory containing pyproject.toml is
    // treated as a project root.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path` and return the first
        // one that contains a pyproject.toml file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own pyproject.toml root.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Open a buffer in project-a; this should start the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Open a buffer in project-b; it should reuse the existing server.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
803
804#[gpui::test]
805async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
806 init_test(cx);
807
808 let fs = FakeFs::new(cx.executor());
809 fs.insert_tree(
810 path!("/dir"),
811 json!({
812 "test.rs": "const A: i32 = 1;",
813 "test2.rs": "",
814 "Cargo.toml": "a = 1",
815 "package.json": "{\"a\": 1}",
816 }),
817 )
818 .await;
819
820 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
821 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
822
823 let mut fake_rust_servers = language_registry.register_fake_lsp(
824 "Rust",
825 FakeLspAdapter {
826 name: "the-rust-language-server",
827 capabilities: lsp::ServerCapabilities {
828 completion_provider: Some(lsp::CompletionOptions {
829 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
830 ..Default::default()
831 }),
832 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
833 lsp::TextDocumentSyncOptions {
834 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
835 ..Default::default()
836 },
837 )),
838 ..Default::default()
839 },
840 ..Default::default()
841 },
842 );
843 let mut fake_json_servers = language_registry.register_fake_lsp(
844 "JSON",
845 FakeLspAdapter {
846 name: "the-json-language-server",
847 capabilities: lsp::ServerCapabilities {
848 completion_provider: Some(lsp::CompletionOptions {
849 trigger_characters: Some(vec![":".to_string()]),
850 ..Default::default()
851 }),
852 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
853 lsp::TextDocumentSyncOptions {
854 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
855 ..Default::default()
856 },
857 )),
858 ..Default::default()
859 },
860 ..Default::default()
861 },
862 );
863
864 // Open a buffer without an associated language server.
865 let (toml_buffer, _handle) = project
866 .update(cx, |project, cx| {
867 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
868 })
869 .await
870 .unwrap();
871
872 // Open a buffer with an associated language server before the language for it has been loaded.
873 let (rust_buffer, _handle2) = project
874 .update(cx, |project, cx| {
875 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
876 })
877 .await
878 .unwrap();
879 rust_buffer.update(cx, |buffer, _| {
880 assert_eq!(buffer.language().map(|l| l.name()), None);
881 });
882
883 // Now we add the languages to the project, and ensure they get assigned to all
884 // the relevant open buffers.
885 language_registry.add(json_lang());
886 language_registry.add(rust_lang());
887 cx.executor().run_until_parked();
888 rust_buffer.update(cx, |buffer, _| {
889 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
890 });
891
892 // A server is started up, and it is notified about Rust files.
893 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
894 assert_eq!(
895 fake_rust_server
896 .receive_notification::<lsp::notification::DidOpenTextDocument>()
897 .await
898 .text_document,
899 lsp::TextDocumentItem {
900 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
901 version: 0,
902 text: "const A: i32 = 1;".to_string(),
903 language_id: "rust".to_string(),
904 }
905 );
906
907 // The buffer is configured based on the language server's capabilities.
908 rust_buffer.update(cx, |buffer, _| {
909 assert_eq!(
910 buffer
911 .completion_triggers()
912 .iter()
913 .cloned()
914 .collect::<Vec<_>>(),
915 &[".".to_string(), "::".to_string()]
916 );
917 });
918 toml_buffer.update(cx, |buffer, _| {
919 assert!(buffer.completion_triggers().is_empty());
920 });
921
922 // Edit a buffer. The changes are reported to the language server.
923 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
924 assert_eq!(
925 fake_rust_server
926 .receive_notification::<lsp::notification::DidChangeTextDocument>()
927 .await
928 .text_document,
929 lsp::VersionedTextDocumentIdentifier::new(
930 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
931 1
932 )
933 );
934
935 // Open a third buffer with a different associated language server.
936 let (json_buffer, _json_handle) = project
937 .update(cx, |project, cx| {
938 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
939 })
940 .await
941 .unwrap();
942
943 // A json language server is started up and is only notified about the json buffer.
944 let mut fake_json_server = fake_json_servers.next().await.unwrap();
945 assert_eq!(
946 fake_json_server
947 .receive_notification::<lsp::notification::DidOpenTextDocument>()
948 .await
949 .text_document,
950 lsp::TextDocumentItem {
951 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
952 version: 0,
953 text: "{\"a\": 1}".to_string(),
954 language_id: "json".to_string(),
955 }
956 );
957
958 // This buffer is configured based on the second language server's
959 // capabilities.
960 json_buffer.update(cx, |buffer, _| {
961 assert_eq!(
962 buffer
963 .completion_triggers()
964 .iter()
965 .cloned()
966 .collect::<Vec<_>>(),
967 &[":".to_string()]
968 );
969 });
970
971 // When opening another buffer whose language server is already running,
972 // it is also configured based on the existing language server's capabilities.
973 let (rust_buffer2, _handle4) = project
974 .update(cx, |project, cx| {
975 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
976 })
977 .await
978 .unwrap();
979 rust_buffer2.update(cx, |buffer, _| {
980 assert_eq!(
981 buffer
982 .completion_triggers()
983 .iter()
984 .cloned()
985 .collect::<Vec<_>>(),
986 &[".".to_string(), "::".to_string()]
987 );
988 });
989
990 // Changes are reported only to servers matching the buffer's language.
991 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
992 rust_buffer2.update(cx, |buffer, cx| {
993 buffer.edit([(0..0, "let x = 1;")], None, cx)
994 });
995 assert_eq!(
996 fake_rust_server
997 .receive_notification::<lsp::notification::DidChangeTextDocument>()
998 .await
999 .text_document,
1000 lsp::VersionedTextDocumentIdentifier::new(
1001 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1002 1
1003 )
1004 );
1005
1006 // Save notifications are reported to all servers.
1007 project
1008 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1009 .await
1010 .unwrap();
1011 assert_eq!(
1012 fake_rust_server
1013 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1014 .await
1015 .text_document,
1016 lsp::TextDocumentIdentifier::new(
1017 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1018 )
1019 );
1020 assert_eq!(
1021 fake_json_server
1022 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1023 .await
1024 .text_document,
1025 lsp::TextDocumentIdentifier::new(
1026 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1027 )
1028 );
1029
1030 // Renames are reported only to servers matching the buffer's language.
1031 fs.rename(
1032 Path::new(path!("/dir/test2.rs")),
1033 Path::new(path!("/dir/test3.rs")),
1034 Default::default(),
1035 )
1036 .await
1037 .unwrap();
1038 assert_eq!(
1039 fake_rust_server
1040 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1041 .await
1042 .text_document,
1043 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1044 );
1045 assert_eq!(
1046 fake_rust_server
1047 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1048 .await
1049 .text_document,
1050 lsp::TextDocumentItem {
1051 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1052 version: 0,
1053 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1054 language_id: "rust".to_string(),
1055 },
1056 );
1057
1058 rust_buffer2.update(cx, |buffer, cx| {
1059 buffer.update_diagnostics(
1060 LanguageServerId(0),
1061 DiagnosticSet::from_sorted_entries(
1062 vec![DiagnosticEntry {
1063 diagnostic: Default::default(),
1064 range: Anchor::MIN..Anchor::MAX,
1065 }],
1066 &buffer.snapshot(),
1067 ),
1068 cx,
1069 );
1070 assert_eq!(
1071 buffer
1072 .snapshot()
1073 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1074 .count(),
1075 1
1076 );
1077 });
1078
1079 // When the rename changes the extension of the file, the buffer gets closed on the old
1080 // language server and gets opened on the new one.
1081 fs.rename(
1082 Path::new(path!("/dir/test3.rs")),
1083 Path::new(path!("/dir/test3.json")),
1084 Default::default(),
1085 )
1086 .await
1087 .unwrap();
1088 assert_eq!(
1089 fake_rust_server
1090 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1091 .await
1092 .text_document,
1093 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1094 );
1095 assert_eq!(
1096 fake_json_server
1097 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1098 .await
1099 .text_document,
1100 lsp::TextDocumentItem {
1101 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1102 version: 0,
1103 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1104 language_id: "json".to_string(),
1105 },
1106 );
1107
1108 // We clear the diagnostics, since the language has changed.
1109 rust_buffer2.update(cx, |buffer, _| {
1110 assert_eq!(
1111 buffer
1112 .snapshot()
1113 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1114 .count(),
1115 0
1116 );
1117 });
1118
1119 // The renamed file's version resets after changing language server.
1120 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1121 assert_eq!(
1122 fake_json_server
1123 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1124 .await
1125 .text_document,
1126 lsp::VersionedTextDocumentIdentifier::new(
1127 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1128 1
1129 )
1130 );
1131
1132 // Restart language servers
1133 project.update(cx, |project, cx| {
1134 project.restart_language_servers_for_buffers(
1135 vec![rust_buffer.clone(), json_buffer.clone()],
1136 HashSet::default(),
1137 cx,
1138 );
1139 });
1140
1141 let mut rust_shutdown_requests = fake_rust_server
1142 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1143 let mut json_shutdown_requests = fake_json_server
1144 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1145 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1146
1147 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1148 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1149
1150 // Ensure rust document is reopened in new rust language server
1151 assert_eq!(
1152 fake_rust_server
1153 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1154 .await
1155 .text_document,
1156 lsp::TextDocumentItem {
1157 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1158 version: 0,
1159 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1160 language_id: "rust".to_string(),
1161 }
1162 );
1163
1164 // Ensure json documents are reopened in new json language server
1165 assert_set_eq!(
1166 [
1167 fake_json_server
1168 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1169 .await
1170 .text_document,
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 ],
1176 [
1177 lsp::TextDocumentItem {
1178 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1179 version: 0,
1180 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1181 language_id: "json".to_string(),
1182 },
1183 lsp::TextDocumentItem {
1184 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1185 version: 0,
1186 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1187 language_id: "json".to_string(),
1188 }
1189 ]
1190 );
1191
1192 // Close notifications are reported only to servers matching the buffer's language.
1193 cx.update(|_| drop(_json_handle));
1194 let close_message = lsp::DidCloseTextDocumentParams {
1195 text_document: lsp::TextDocumentIdentifier::new(
1196 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1197 ),
1198 };
1199 assert_eq!(
1200 fake_json_server
1201 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1202 .await,
1203 close_message,
1204 );
1205}
1206
1207#[gpui::test]
1208async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1209 init_test(cx);
1210
1211 let fs = FakeFs::new(cx.executor());
1212 fs.insert_tree(
1213 path!("/the-root"),
1214 json!({
1215 ".gitignore": "target\n",
1216 "Cargo.lock": "",
1217 "src": {
1218 "a.rs": "",
1219 "b.rs": "",
1220 },
1221 "target": {
1222 "x": {
1223 "out": {
1224 "x.rs": ""
1225 }
1226 },
1227 "y": {
1228 "out": {
1229 "y.rs": "",
1230 }
1231 },
1232 "z": {
1233 "out": {
1234 "z.rs": ""
1235 }
1236 }
1237 }
1238 }),
1239 )
1240 .await;
1241 fs.insert_tree(
1242 path!("/the-registry"),
1243 json!({
1244 "dep1": {
1245 "src": {
1246 "dep1.rs": "",
1247 }
1248 },
1249 "dep2": {
1250 "src": {
1251 "dep2.rs": "",
1252 }
1253 },
1254 }),
1255 )
1256 .await;
1257 fs.insert_tree(
1258 path!("/the/stdlib"),
1259 json!({
1260 "LICENSE": "",
1261 "src": {
1262 "string.rs": "",
1263 }
1264 }),
1265 )
1266 .await;
1267
1268 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1269 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1270 (project.languages().clone(), project.lsp_store())
1271 });
1272 language_registry.add(rust_lang());
1273 let mut fake_servers = language_registry.register_fake_lsp(
1274 "Rust",
1275 FakeLspAdapter {
1276 name: "the-language-server",
1277 ..Default::default()
1278 },
1279 );
1280
1281 cx.executor().run_until_parked();
1282
1283 // Start the language server by opening a buffer with a compatible file extension.
1284 project
1285 .update(cx, |project, cx| {
1286 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1287 })
1288 .await
1289 .unwrap();
1290
1291 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1292 project.update(cx, |project, cx| {
1293 let worktree = project.worktrees(cx).next().unwrap();
1294 assert_eq!(
1295 worktree
1296 .read(cx)
1297 .snapshot()
1298 .entries(true, 0)
1299 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1300 .collect::<Vec<_>>(),
1301 &[
1302 ("", false),
1303 (".gitignore", false),
1304 ("Cargo.lock", false),
1305 ("src", false),
1306 ("src/a.rs", false),
1307 ("src/b.rs", false),
1308 ("target", true),
1309 ]
1310 );
1311 });
1312
1313 let prev_read_dir_count = fs.read_dir_call_count();
1314
1315 let fake_server = fake_servers.next().await.unwrap();
1316 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1317 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1318 id
1319 });
1320
1321 // Simulate jumping to a definition in a dependency outside of the worktree.
1322 let _out_of_worktree_buffer = project
1323 .update(cx, |project, cx| {
1324 project.open_local_buffer_via_lsp(
1325 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1326 server_id,
1327 cx,
1328 )
1329 })
1330 .await
1331 .unwrap();
1332
1333 // Keep track of the FS events reported to the language server.
1334 let file_changes = Arc::new(Mutex::new(Vec::new()));
1335 fake_server
1336 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1337 registrations: vec![lsp::Registration {
1338 id: Default::default(),
1339 method: "workspace/didChangeWatchedFiles".to_string(),
1340 register_options: serde_json::to_value(
1341 lsp::DidChangeWatchedFilesRegistrationOptions {
1342 watchers: vec![
1343 lsp::FileSystemWatcher {
1344 glob_pattern: lsp::GlobPattern::String(
1345 path!("/the-root/Cargo.toml").to_string(),
1346 ),
1347 kind: None,
1348 },
1349 lsp::FileSystemWatcher {
1350 glob_pattern: lsp::GlobPattern::String(
1351 path!("/the-root/src/*.{rs,c}").to_string(),
1352 ),
1353 kind: None,
1354 },
1355 lsp::FileSystemWatcher {
1356 glob_pattern: lsp::GlobPattern::String(
1357 path!("/the-root/target/y/**/*.rs").to_string(),
1358 ),
1359 kind: None,
1360 },
1361 lsp::FileSystemWatcher {
1362 glob_pattern: lsp::GlobPattern::String(
1363 path!("/the/stdlib/src/**/*.rs").to_string(),
1364 ),
1365 kind: None,
1366 },
1367 lsp::FileSystemWatcher {
1368 glob_pattern: lsp::GlobPattern::String(
1369 path!("**/Cargo.lock").to_string(),
1370 ),
1371 kind: None,
1372 },
1373 ],
1374 },
1375 )
1376 .ok(),
1377 }],
1378 })
1379 .await
1380 .into_response()
1381 .unwrap();
1382 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1383 let file_changes = file_changes.clone();
1384 move |params, _| {
1385 let mut file_changes = file_changes.lock();
1386 file_changes.extend(params.changes);
1387 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1388 }
1389 });
1390
1391 cx.executor().run_until_parked();
1392 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1393 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1394
1395 let mut new_watched_paths = fs.watched_paths();
1396 new_watched_paths.retain(|path| {
1397 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1398 });
1399 assert_eq!(
1400 &new_watched_paths,
1401 &[
1402 Path::new(path!("/the-root")),
1403 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1404 Path::new(path!("/the/stdlib/src"))
1405 ]
1406 );
1407
1408 // Now the language server has asked us to watch an ignored directory path,
1409 // so we recursively load it.
1410 project.update(cx, |project, cx| {
1411 let worktree = project.visible_worktrees(cx).next().unwrap();
1412 assert_eq!(
1413 worktree
1414 .read(cx)
1415 .snapshot()
1416 .entries(true, 0)
1417 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1418 .collect::<Vec<_>>(),
1419 &[
1420 ("", false),
1421 (".gitignore", false),
1422 ("Cargo.lock", false),
1423 ("src", false),
1424 ("src/a.rs", false),
1425 ("src/b.rs", false),
1426 ("target", true),
1427 ("target/x", true),
1428 ("target/y", true),
1429 ("target/y/out", true),
1430 ("target/y/out/y.rs", true),
1431 ("target/z", true),
1432 ]
1433 );
1434 });
1435
1436 // Perform some file system mutations, two of which match the watched patterns,
1437 // and one of which does not.
1438 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1439 .await
1440 .unwrap();
1441 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1442 .await
1443 .unwrap();
1444 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1445 .await
1446 .unwrap();
1447 fs.create_file(
1448 path!("/the-root/target/x/out/x2.rs").as_ref(),
1449 Default::default(),
1450 )
1451 .await
1452 .unwrap();
1453 fs.create_file(
1454 path!("/the-root/target/y/out/y2.rs").as_ref(),
1455 Default::default(),
1456 )
1457 .await
1458 .unwrap();
1459 fs.save(
1460 path!("/the-root/Cargo.lock").as_ref(),
1461 &"".into(),
1462 Default::default(),
1463 )
1464 .await
1465 .unwrap();
1466 fs.save(
1467 path!("/the-stdlib/LICENSE").as_ref(),
1468 &"".into(),
1469 Default::default(),
1470 )
1471 .await
1472 .unwrap();
1473 fs.save(
1474 path!("/the/stdlib/src/string.rs").as_ref(),
1475 &"".into(),
1476 Default::default(),
1477 )
1478 .await
1479 .unwrap();
1480
1481 // The language server receives events for the FS mutations that match its watch patterns.
1482 cx.executor().run_until_parked();
1483 assert_eq!(
1484 &*file_changes.lock(),
1485 &[
1486 lsp::FileEvent {
1487 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1488 typ: lsp::FileChangeType::CHANGED,
1489 },
1490 lsp::FileEvent {
1491 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1492 typ: lsp::FileChangeType::DELETED,
1493 },
1494 lsp::FileEvent {
1495 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1496 typ: lsp::FileChangeType::CREATED,
1497 },
1498 lsp::FileEvent {
1499 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1500 typ: lsp::FileChangeType::CREATED,
1501 },
1502 lsp::FileEvent {
1503 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1504 typ: lsp::FileChangeType::CHANGED,
1505 },
1506 ]
1507 );
1508}
1509
1510#[gpui::test]
1511async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1512 init_test(cx);
1513
1514 let fs = FakeFs::new(cx.executor());
1515 fs.insert_tree(
1516 path!("/dir"),
1517 json!({
1518 "a.rs": "let a = 1;",
1519 "b.rs": "let b = 2;"
1520 }),
1521 )
1522 .await;
1523
1524 let project = Project::test(
1525 fs,
1526 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1527 cx,
1528 )
1529 .await;
1530 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1531
1532 let buffer_a = project
1533 .update(cx, |project, cx| {
1534 project.open_local_buffer(path!("/dir/a.rs"), cx)
1535 })
1536 .await
1537 .unwrap();
1538 let buffer_b = project
1539 .update(cx, |project, cx| {
1540 project.open_local_buffer(path!("/dir/b.rs"), cx)
1541 })
1542 .await
1543 .unwrap();
1544
1545 lsp_store.update(cx, |lsp_store, cx| {
1546 lsp_store
1547 .update_diagnostics(
1548 LanguageServerId(0),
1549 lsp::PublishDiagnosticsParams {
1550 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1551 version: None,
1552 diagnostics: vec![lsp::Diagnostic {
1553 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1554 severity: Some(lsp::DiagnosticSeverity::ERROR),
1555 message: "error 1".to_string(),
1556 ..Default::default()
1557 }],
1558 },
1559 None,
1560 DiagnosticSourceKind::Pushed,
1561 &[],
1562 cx,
1563 )
1564 .unwrap();
1565 lsp_store
1566 .update_diagnostics(
1567 LanguageServerId(0),
1568 lsp::PublishDiagnosticsParams {
1569 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1570 version: None,
1571 diagnostics: vec![lsp::Diagnostic {
1572 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1573 severity: Some(DiagnosticSeverity::WARNING),
1574 message: "error 2".to_string(),
1575 ..Default::default()
1576 }],
1577 },
1578 None,
1579 DiagnosticSourceKind::Pushed,
1580 &[],
1581 cx,
1582 )
1583 .unwrap();
1584 });
1585
1586 buffer_a.update(cx, |buffer, _| {
1587 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1588 assert_eq!(
1589 chunks
1590 .iter()
1591 .map(|(s, d)| (s.as_str(), *d))
1592 .collect::<Vec<_>>(),
1593 &[
1594 ("let ", None),
1595 ("a", Some(DiagnosticSeverity::ERROR)),
1596 (" = 1;", None),
1597 ]
1598 );
1599 });
1600 buffer_b.update(cx, |buffer, _| {
1601 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1602 assert_eq!(
1603 chunks
1604 .iter()
1605 .map(|(s, d)| (s.as_str(), *d))
1606 .collect::<Vec<_>>(),
1607 &[
1608 ("let ", None),
1609 ("b", Some(DiagnosticSeverity::WARNING)),
1610 (" = 2;", None),
1611 ]
1612 );
1613 });
1614}
1615
1616#[gpui::test]
1617async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1618 init_test(cx);
1619
1620 let fs = FakeFs::new(cx.executor());
1621 fs.insert_tree(
1622 path!("/root"),
1623 json!({
1624 "dir": {
1625 ".git": {
1626 "HEAD": "ref: refs/heads/main",
1627 },
1628 ".gitignore": "b.rs",
1629 "a.rs": "let a = 1;",
1630 "b.rs": "let b = 2;",
1631 },
1632 "other.rs": "let b = c;"
1633 }),
1634 )
1635 .await;
1636
1637 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1638 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1639 let (worktree, _) = project
1640 .update(cx, |project, cx| {
1641 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1642 })
1643 .await
1644 .unwrap();
1645 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1646
1647 let (worktree, _) = project
1648 .update(cx, |project, cx| {
1649 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1650 })
1651 .await
1652 .unwrap();
1653 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1654
1655 let server_id = LanguageServerId(0);
1656 lsp_store.update(cx, |lsp_store, cx| {
1657 lsp_store
1658 .update_diagnostics(
1659 server_id,
1660 lsp::PublishDiagnosticsParams {
1661 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1662 version: None,
1663 diagnostics: vec![lsp::Diagnostic {
1664 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1665 severity: Some(lsp::DiagnosticSeverity::ERROR),
1666 message: "unused variable 'b'".to_string(),
1667 ..Default::default()
1668 }],
1669 },
1670 None,
1671 DiagnosticSourceKind::Pushed,
1672 &[],
1673 cx,
1674 )
1675 .unwrap();
1676 lsp_store
1677 .update_diagnostics(
1678 server_id,
1679 lsp::PublishDiagnosticsParams {
1680 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1681 version: None,
1682 diagnostics: vec![lsp::Diagnostic {
1683 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1684 severity: Some(lsp::DiagnosticSeverity::ERROR),
1685 message: "unknown variable 'c'".to_string(),
1686 ..Default::default()
1687 }],
1688 },
1689 None,
1690 DiagnosticSourceKind::Pushed,
1691 &[],
1692 cx,
1693 )
1694 .unwrap();
1695 });
1696
1697 let main_ignored_buffer = project
1698 .update(cx, |project, cx| {
1699 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1700 })
1701 .await
1702 .unwrap();
1703 main_ignored_buffer.update(cx, |buffer, _| {
1704 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1705 assert_eq!(
1706 chunks
1707 .iter()
1708 .map(|(s, d)| (s.as_str(), *d))
1709 .collect::<Vec<_>>(),
1710 &[
1711 ("let ", None),
1712 ("b", Some(DiagnosticSeverity::ERROR)),
1713 (" = 2;", None),
1714 ],
1715 "Gigitnored buffers should still get in-buffer diagnostics",
1716 );
1717 });
1718 let other_buffer = project
1719 .update(cx, |project, cx| {
1720 project.open_buffer((other_worktree_id, rel_path("")), cx)
1721 })
1722 .await
1723 .unwrap();
1724 other_buffer.update(cx, |buffer, _| {
1725 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1726 assert_eq!(
1727 chunks
1728 .iter()
1729 .map(|(s, d)| (s.as_str(), *d))
1730 .collect::<Vec<_>>(),
1731 &[
1732 ("let b = ", None),
1733 ("c", Some(DiagnosticSeverity::ERROR)),
1734 (";", None),
1735 ],
1736 "Buffers from hidden projects should still get in-buffer diagnostics"
1737 );
1738 });
1739
1740 project.update(cx, |project, cx| {
1741 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1742 assert_eq!(
1743 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1744 vec![(
1745 ProjectPath {
1746 worktree_id: main_worktree_id,
1747 path: rel_path("b.rs").into(),
1748 },
1749 server_id,
1750 DiagnosticSummary {
1751 error_count: 1,
1752 warning_count: 0,
1753 }
1754 )]
1755 );
1756 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1757 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1758 });
1759}
1760
/// Exercises the disk-based diagnostics progress lifecycle: the adapter's
/// progress token must bracket published diagnostics with
/// `DiskBasedDiagnosticsStarted`/`Finished` events, and publishing empty
/// diagnostics twice in a row must produce only one `DiagnosticsUpdated`.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications using this token are treated as
            // disk-based diagnostics activity.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before the server begins reporting progress.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the registered token marks disk-based
    // diagnostics as started.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress marks disk-based diagnostics as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer's snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: empty -> empty is a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1897
/// Restarting a language server while its disk-based diagnostics are still in
/// progress must not leave the project stuck in a "diagnosing" state: only
/// the replacement server's progress is tracked to completion.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1998
/// Diagnostics published by a language server must be cleared — both from the
/// buffer and from the project-wide summary — when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2079
2080#[gpui::test]
2081async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2082 init_test(cx);
2083
2084 let fs = FakeFs::new(cx.executor());
2085 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2086
2087 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2088 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2089
2090 language_registry.add(rust_lang());
2091 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2092
2093 let (buffer, _handle) = project
2094 .update(cx, |project, cx| {
2095 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2096 })
2097 .await
2098 .unwrap();
2099
2100 // Before restarting the server, report diagnostics with an unknown buffer version.
2101 let fake_server = fake_servers.next().await.unwrap();
2102 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2103 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2104 version: Some(10000),
2105 diagnostics: Vec::new(),
2106 });
2107 cx.executor().run_until_parked();
2108 project.update(cx, |project, cx| {
2109 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2110 });
2111
2112 let mut fake_server = fake_servers.next().await.unwrap();
2113 let notification = fake_server
2114 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2115 .await
2116 .text_document;
2117 assert_eq!(notification.version, 0);
2118}
2119
/// Cancelling language-server work for a buffer must send a
/// `WorkDoneProgressCancel` only for progress that was begun as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Begin one non-cancellable and one cancellable progress; only the latter
    // should receive a cancel notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token gets cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2184
/// Toggling `enable_language_server` in per-language settings must stop and
/// restart only the affected language's server, leaving others untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server reopens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2302
// Verifies that diagnostics published against an older buffer version are
// translated through subsequent local edits: ranges move with the text,
// overlapping diagnostics are highlighted correctly, and out-of-order
// (version-lagging) publishes are still reconciled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source so published
    // diagnostics carry `is_disk_based: true` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges are shifted two lines down by the "\n\n" edit above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk iteration reflects the per-range severities.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // This warning overlaps the error above; the error should win
            // where the two ranges intersect.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // In the overlap, the ERROR severity is shown; the WARNING covers the rest.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries are returned in buffer order even though they were
        // published out of order above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2594
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, or backward at the end
// of a line to cover the preceding character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store,
    // bypassing a fake language server.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            // Empty range mid-line (before the `;`).
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            // Empty range past the end of line 1's text.
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2670
// Verifies that diagnostics reported by two distinct language servers for the
// same path are both retained and counted in the project-wide summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same range, same severity — but attributed to server 0.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // An overlapping diagnostic from a second server (id 1) must not
        // replace the first server's entry.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both errors are counted: one per server.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2731
// Verifies that LSP text edits computed against an OLD document version are
// rebased through local edits made since that version, so that applying them
// to the current buffer produces the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the edits below will be expressed against this stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert the stale LSP edits into buffer edits; positions refer to the
    // buffer as it was at `lsp_document_version`.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves both the local edits and the
    // server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2886
// Verifies that a large "rewrite most of the file" style LSP response (as
// rust-analyzer produces for merge-imports) is minimized into a small set of
// buffer edits rather than replacing the whole file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff collapses into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2997
// Verifies tolerance of a spec-violating edit pair: an insertion at the same
// position AFTER a replacement that starts there. Both edits should still be
// applied sensibly.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (identity) replacement.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3053
// Verifies that malformed LSP edits — unordered, with inverted ranges and
// ranges past the end of the document — are normalized and clipped before
// being converted into buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, only two minimal, well-formed edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3160
3161fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3162 buffer: &Buffer,
3163 range: Range<T>,
3164) -> Vec<(String, Option<DiagnosticSeverity>)> {
3165 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3166 for chunk in buffer.snapshot().chunks(range, true) {
3167 if chunks
3168 .last()
3169 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3170 {
3171 chunks.last_mut().unwrap().0.push_str(chunk.text);
3172 } else {
3173 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3174 }
3175 }
3176 chunks
3177}
3178
// Verifies go-to-definition into a file outside the project's worktree: the
// target is opened in an invisible worktree that is dropped once the last
// reference to the definition goes away.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3277
// Verifies that when a completion item provides `text_edit`, its range and
// new text take precedence over `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item sets all three of label / insert_text / text_edit; the edit
    // covers the trailing "fqn" (last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3360
// Exercises LSP 3.17 `CompletionList.itemDefaults.editRange`: when an item
// lacks its own `text_edit`, the list-level default range must be used, and
// the inserted text falls back from `insert_text` to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // Install the handler and pump it once so the pending completion
        // request is answered before we await the task below.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // The completion's text comes from `insert_text`, and its replace
        // range from the list-level default.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither `text_edit` nor `insert_text`, the label itself is
        // the inserted text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3496
// Covers completion resolution when the server provides no default
// `edit_range`: the replace range must be inferred from the text
// surrounding the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Install the handler and pump it once so the pending completion
    // request gets answered before the task is awaited.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The word "fqn" before the cursor is what gets replaced.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The "cmp" segment inside the string literal is replaced.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3602
3603#[gpui::test]
3604async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3605 init_test(cx);
3606
3607 let fs = FakeFs::new(cx.executor());
3608 fs.insert_tree(
3609 path!("/dir"),
3610 json!({
3611 "a.ts": "",
3612 }),
3613 )
3614 .await;
3615
3616 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3617
3618 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3619 language_registry.add(typescript_lang());
3620 let mut fake_language_servers = language_registry.register_fake_lsp(
3621 "TypeScript",
3622 FakeLspAdapter {
3623 capabilities: lsp::ServerCapabilities {
3624 completion_provider: Some(lsp::CompletionOptions {
3625 trigger_characters: Some(vec![":".to_string()]),
3626 ..Default::default()
3627 }),
3628 ..Default::default()
3629 },
3630 ..Default::default()
3631 },
3632 );
3633
3634 let (buffer, _handle) = project
3635 .update(cx, |p, cx| {
3636 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3637 })
3638 .await
3639 .unwrap();
3640
3641 let fake_server = fake_language_servers.next().await.unwrap();
3642
3643 let text = "let a = b.fqn";
3644 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3645 let completions = project.update(cx, |project, cx| {
3646 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3647 });
3648
3649 fake_server
3650 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3651 Ok(Some(lsp::CompletionResponse::Array(vec![
3652 lsp::CompletionItem {
3653 label: "fullyQualifiedName?".into(),
3654 insert_text: Some("fully\rQualified\r\nName".into()),
3655 ..Default::default()
3656 },
3657 ])))
3658 })
3659 .next()
3660 .await;
3661 let completions = completions
3662 .await
3663 .unwrap()
3664 .into_iter()
3665 .flat_map(|response| response.completions)
3666 .collect::<Vec<_>>();
3667 assert_eq!(completions.len(), 1);
3668 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3669}
3670
// Covers the LSP code-action flow where an action carries a *command* rather
// than edits: resolving the action populates the command, executing the
// command makes the server send `workspace/applyEdit` back to the editor,
// and the resulting edits must appear in the transaction returned by
// `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // `resolve_provider: true` forces the resolve round-trip
                // exercised below.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the editor to prepend "X" at the start of the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3812
3813#[gpui::test]
3814async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3815 init_test(cx);
3816 let fs = FakeFs::new(cx.background_executor.clone());
3817 let expected_contents = "content";
3818 fs.as_fake()
3819 .insert_tree(
3820 "/root",
3821 json!({
3822 "test.txt": expected_contents
3823 }),
3824 )
3825 .await;
3826
3827 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3828
3829 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3830 let worktree = project.worktrees(cx).next().unwrap();
3831 let entry_id = worktree
3832 .read(cx)
3833 .entry_for_path(rel_path("test.txt"))
3834 .unwrap()
3835 .id;
3836 (worktree, entry_id)
3837 });
3838 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3839 let _result = project
3840 .update(cx, |project, cx| {
3841 project.rename_entry(
3842 entry_id,
3843 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3844 cx,
3845 )
3846 })
3847 .await
3848 .unwrap();
3849 worktree.read_with(cx, |worktree, _| {
3850 assert!(
3851 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3852 "Old file should have been removed"
3853 );
3854 assert!(
3855 worktree
3856 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3857 .is_some(),
3858 "Whole directory hierarchy and the new file should have been created"
3859 );
3860 });
3861 assert_eq!(
3862 worktree
3863 .update(cx, |worktree, cx| {
3864 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
3865 })
3866 .await
3867 .unwrap()
3868 .text,
3869 expected_contents,
3870 "Moved file's contents should be preserved"
3871 );
3872
3873 let entry_id = worktree.read_with(cx, |worktree, _| {
3874 worktree
3875 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3876 .unwrap()
3877 .id
3878 });
3879
3880 let _result = project
3881 .update(cx, |project, cx| {
3882 project.rename_entry(
3883 entry_id,
3884 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
3885 cx,
3886 )
3887 })
3888 .await
3889 .unwrap();
3890 worktree.read_with(cx, |worktree, _| {
3891 assert!(
3892 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3893 "First file should not reappear"
3894 );
3895 assert!(
3896 worktree
3897 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3898 .is_none(),
3899 "Old file should have been removed"
3900 );
3901 assert!(
3902 worktree
3903 .entry_for_path(rel_path("dir1/dir2/test.txt"))
3904 .is_some(),
3905 "No error should have occurred after moving into existing directory"
3906 );
3907 });
3908 assert_eq!(
3909 worktree
3910 .update(cx, |worktree, cx| {
3911 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
3912 })
3913 .await
3914 .unwrap()
3915 .text,
3916 expected_contents,
3917 "Moved file's contents should be preserved"
3918 );
3919}
3920
3921#[gpui::test(iterations = 10)]
3922async fn test_save_file(cx: &mut gpui::TestAppContext) {
3923 init_test(cx);
3924
3925 let fs = FakeFs::new(cx.executor());
3926 fs.insert_tree(
3927 path!("/dir"),
3928 json!({
3929 "file1": "the old contents",
3930 }),
3931 )
3932 .await;
3933
3934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3935 let buffer = project
3936 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3937 .await
3938 .unwrap();
3939 buffer.update(cx, |buffer, cx| {
3940 assert_eq!(buffer.text(), "the old contents");
3941 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3942 });
3943
3944 project
3945 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3946 .await
3947 .unwrap();
3948
3949 let new_text = fs
3950 .load(Path::new(path!("/dir/file1")))
3951 .await
3952 .unwrap()
3953 .replace("\r\n", "\n");
3954 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3955}
3956
// Regression test for issue #24349: saving a brand-new untitled buffer under
// a path whose extension maps to a language must spawn that language's
// server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; it has no file, so no language can be
    // detected and no server should be associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a ".rs" path should trigger language detection.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4036
4037#[gpui::test(iterations = 30)]
4038async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4039 init_test(cx);
4040
4041 let fs = FakeFs::new(cx.executor());
4042 fs.insert_tree(
4043 path!("/dir"),
4044 json!({
4045 "file1": "the original contents",
4046 }),
4047 )
4048 .await;
4049
4050 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4051 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4052 let buffer = project
4053 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4054 .await
4055 .unwrap();
4056
4057 // Simulate buffer diffs being slow, so that they don't complete before
4058 // the next file change occurs.
4059 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4060
4061 // Change the buffer's file on disk, and then wait for the file change
4062 // to be detected by the worktree, so that the buffer starts reloading.
4063 fs.save(
4064 path!("/dir/file1").as_ref(),
4065 &"the first contents".into(),
4066 Default::default(),
4067 )
4068 .await
4069 .unwrap();
4070 worktree.next_event(cx).await;
4071
4072 // Change the buffer's file again. Depending on the random seed, the
4073 // previous file change may still be in progress.
4074 fs.save(
4075 path!("/dir/file1").as_ref(),
4076 &"the second contents".into(),
4077 Default::default(),
4078 )
4079 .await
4080 .unwrap();
4081 worktree.next_event(cx).await;
4082
4083 cx.executor().run_until_parked();
4084 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4085 buffer.read_with(cx, |buffer, _| {
4086 assert_eq!(buffer.text(), on_disk_text);
4087 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4088 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4089 });
4090}
4091
4092#[gpui::test(iterations = 30)]
4093async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4094 init_test(cx);
4095
4096 let fs = FakeFs::new(cx.executor());
4097 fs.insert_tree(
4098 path!("/dir"),
4099 json!({
4100 "file1": "the original contents",
4101 }),
4102 )
4103 .await;
4104
4105 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4106 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4107 let buffer = project
4108 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4109 .await
4110 .unwrap();
4111
4112 // Simulate buffer diffs being slow, so that they don't complete before
4113 // the next file change occurs.
4114 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4115
4116 // Change the buffer's file on disk, and then wait for the file change
4117 // to be detected by the worktree, so that the buffer starts reloading.
4118 fs.save(
4119 path!("/dir/file1").as_ref(),
4120 &"the first contents".into(),
4121 Default::default(),
4122 )
4123 .await
4124 .unwrap();
4125 worktree.next_event(cx).await;
4126
4127 cx.executor()
4128 .spawn(cx.executor().simulate_random_delay())
4129 .await;
4130
4131 // Perform a noop edit, causing the buffer's version to increase.
4132 buffer.update(cx, |buffer, cx| {
4133 buffer.edit([(0..0, " ")], None, cx);
4134 buffer.undo(cx);
4135 });
4136
4137 cx.executor().run_until_parked();
4138 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4139 buffer.read_with(cx, |buffer, _| {
4140 let buffer_text = buffer.text();
4141 if buffer_text == on_disk_text {
4142 assert!(
4143 !buffer.is_dirty() && !buffer.has_conflict(),
4144 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4145 );
4146 }
4147 // If the file change occurred while the buffer was processing the first
4148 // change, the buffer will be in a conflicting state.
4149 else {
4150 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4151 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4152 }
4153 });
4154}
4155
4156#[gpui::test]
4157async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4158 init_test(cx);
4159
4160 let fs = FakeFs::new(cx.executor());
4161 fs.insert_tree(
4162 path!("/dir"),
4163 json!({
4164 "file1": "the old contents",
4165 }),
4166 )
4167 .await;
4168
4169 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4170 let buffer = project
4171 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4172 .await
4173 .unwrap();
4174 buffer.update(cx, |buffer, cx| {
4175 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4176 });
4177
4178 project
4179 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4180 .await
4181 .unwrap();
4182
4183 let new_text = fs
4184 .load(Path::new(path!("/dir/file1")))
4185 .await
4186 .unwrap()
4187 .replace("\r\n", "\n");
4188 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4189}
4190
4191#[gpui::test]
4192async fn test_save_as(cx: &mut gpui::TestAppContext) {
4193 init_test(cx);
4194
4195 let fs = FakeFs::new(cx.executor());
4196 fs.insert_tree("/dir", json!({})).await;
4197
4198 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4199
4200 let languages = project.update(cx, |project, _| project.languages().clone());
4201 languages.add(rust_lang());
4202
4203 let buffer = project.update(cx, |project, cx| {
4204 project.create_local_buffer("", None, false, cx)
4205 });
4206 buffer.update(cx, |buffer, cx| {
4207 buffer.edit([(0..0, "abc")], None, cx);
4208 assert!(buffer.is_dirty());
4209 assert!(!buffer.has_conflict());
4210 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4211 });
4212 project
4213 .update(cx, |project, cx| {
4214 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4215 let path = ProjectPath {
4216 worktree_id,
4217 path: rel_path("file1.rs").into(),
4218 };
4219 project.save_buffer_as(buffer.clone(), path, cx)
4220 })
4221 .await
4222 .unwrap();
4223 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4224
4225 cx.executor().run_until_parked();
4226 buffer.update(cx, |buffer, cx| {
4227 assert_eq!(
4228 buffer.file().unwrap().full_path(cx),
4229 Path::new("dir/file1.rs")
4230 );
4231 assert!(!buffer.is_dirty());
4232 assert!(!buffer.has_conflict());
4233 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4234 });
4235
4236 let opened_buffer = project
4237 .update(cx, |project, cx| {
4238 project.open_local_buffer("/dir/file1.rs", cx)
4239 })
4240 .await
4241 .unwrap();
4242 assert_eq!(opened_buffer, buffer);
4243}
4244
// Uses the real filesystem (TempTree + RealFs) to verify that entry ids and
// open buffers survive on-disk renames/deletions, and that a remote worktree
// replica converges to the same state by replaying the observed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS operations block, so allow the test executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry id by relative path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // on the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Renamed/moved entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers now point at the entries' new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        // ...and their disk state reflects whether the file still exists.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4412
4413#[gpui::test(iterations = 10)]
4414async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4415 init_test(cx);
4416
4417 let fs = FakeFs::new(cx.executor());
4418 fs.insert_tree(
4419 path!("/dir"),
4420 json!({
4421 "a": {
4422 "file1": "",
4423 }
4424 }),
4425 )
4426 .await;
4427
4428 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4429 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4430 let tree_id = tree.update(cx, |tree, _| tree.id());
4431
4432 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4433 project.update(cx, |project, cx| {
4434 let tree = project.worktrees(cx).next().unwrap();
4435 tree.read(cx)
4436 .entry_for_path(rel_path(path))
4437 .unwrap_or_else(|| panic!("no entry for path {}", path))
4438 .id
4439 })
4440 };
4441
4442 let dir_id = id_for_path("a", cx);
4443 let file_id = id_for_path("a/file1", cx);
4444 let buffer = project
4445 .update(cx, |p, cx| {
4446 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4447 })
4448 .await
4449 .unwrap();
4450 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4451
4452 project
4453 .update(cx, |project, cx| {
4454 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4455 })
4456 .unwrap()
4457 .await
4458 .into_included()
4459 .unwrap();
4460 cx.executor().run_until_parked();
4461
4462 assert_eq!(id_for_path("b", cx), dir_id);
4463 assert_eq!(id_for_path("b/file1", cx), file_id);
4464 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4465}
4466
4467#[gpui::test]
4468async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4469 init_test(cx);
4470
4471 let fs = FakeFs::new(cx.executor());
4472 fs.insert_tree(
4473 "/dir",
4474 json!({
4475 "a.txt": "a-contents",
4476 "b.txt": "b-contents",
4477 }),
4478 )
4479 .await;
4480
4481 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4482
4483 // Spawn multiple tasks to open paths, repeating some paths.
4484 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4485 (
4486 p.open_local_buffer("/dir/a.txt", cx),
4487 p.open_local_buffer("/dir/b.txt", cx),
4488 p.open_local_buffer("/dir/a.txt", cx),
4489 )
4490 });
4491
4492 let buffer_a_1 = buffer_a_1.await.unwrap();
4493 let buffer_a_2 = buffer_a_2.await.unwrap();
4494 let buffer_b = buffer_b.await.unwrap();
4495 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4496 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4497
4498 // There is only one buffer per path.
4499 let buffer_a_id = buffer_a_1.entity_id();
4500 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4501
4502 // Open the same path again while it is still open.
4503 drop(buffer_a_1);
4504 let buffer_a_3 = project
4505 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4506 .await
4507 .unwrap();
4508
4509 // There's still only one buffer per path.
4510 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4511}
4512
4513#[gpui::test]
4514async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4515 init_test(cx);
4516
4517 let fs = FakeFs::new(cx.executor());
4518 fs.insert_tree(
4519 path!("/dir"),
4520 json!({
4521 "file1": "abc",
4522 "file2": "def",
4523 "file3": "ghi",
4524 }),
4525 )
4526 .await;
4527
4528 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4529
4530 let buffer1 = project
4531 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4532 .await
4533 .unwrap();
4534 let events = Arc::new(Mutex::new(Vec::new()));
4535
4536 // initially, the buffer isn't dirty.
4537 buffer1.update(cx, |buffer, cx| {
4538 cx.subscribe(&buffer1, {
4539 let events = events.clone();
4540 move |_, _, event, _| match event {
4541 BufferEvent::Operation { .. } => {}
4542 _ => events.lock().push(event.clone()),
4543 }
4544 })
4545 .detach();
4546
4547 assert!(!buffer.is_dirty());
4548 assert!(events.lock().is_empty());
4549
4550 buffer.edit([(1..2, "")], None, cx);
4551 });
4552
4553 // after the first edit, the buffer is dirty, and emits a dirtied event.
4554 buffer1.update(cx, |buffer, cx| {
4555 assert!(buffer.text() == "ac");
4556 assert!(buffer.is_dirty());
4557 assert_eq!(
4558 *events.lock(),
4559 &[
4560 language::BufferEvent::Edited,
4561 language::BufferEvent::DirtyChanged
4562 ]
4563 );
4564 events.lock().clear();
4565 buffer.did_save(
4566 buffer.version(),
4567 buffer.file().unwrap().disk_state().mtime(),
4568 cx,
4569 );
4570 });
4571
4572 // after saving, the buffer is not dirty, and emits a saved event.
4573 buffer1.update(cx, |buffer, cx| {
4574 assert!(!buffer.is_dirty());
4575 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4576 events.lock().clear();
4577
4578 buffer.edit([(1..1, "B")], None, cx);
4579 buffer.edit([(2..2, "D")], None, cx);
4580 });
4581
4582 // after editing again, the buffer is dirty, and emits another dirty event.
4583 buffer1.update(cx, |buffer, cx| {
4584 assert!(buffer.text() == "aBDc");
4585 assert!(buffer.is_dirty());
4586 assert_eq!(
4587 *events.lock(),
4588 &[
4589 language::BufferEvent::Edited,
4590 language::BufferEvent::DirtyChanged,
4591 language::BufferEvent::Edited,
4592 ],
4593 );
4594 events.lock().clear();
4595
4596 // After restoring the buffer to its previously-saved state,
4597 // the buffer is not considered dirty anymore.
4598 buffer.edit([(1..3, "")], None, cx);
4599 assert!(buffer.text() == "ac");
4600 assert!(!buffer.is_dirty());
4601 });
4602
4603 assert_eq!(
4604 *events.lock(),
4605 &[
4606 language::BufferEvent::Edited,
4607 language::BufferEvent::DirtyChanged
4608 ]
4609 );
4610
4611 // When a file is deleted, it is not considered dirty.
4612 let events = Arc::new(Mutex::new(Vec::new()));
4613 let buffer2 = project
4614 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4615 .await
4616 .unwrap();
4617 buffer2.update(cx, |_, cx| {
4618 cx.subscribe(&buffer2, {
4619 let events = events.clone();
4620 move |_, _, event, _| match event {
4621 BufferEvent::Operation { .. } => {}
4622 _ => events.lock().push(event.clone()),
4623 }
4624 })
4625 .detach();
4626 });
4627
4628 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4629 .await
4630 .unwrap();
4631 cx.executor().run_until_parked();
4632 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4633 assert_eq!(
4634 mem::take(&mut *events.lock()),
4635 &[language::BufferEvent::FileHandleChanged]
4636 );
4637
4638 // Buffer becomes dirty when edited.
4639 buffer2.update(cx, |buffer, cx| {
4640 buffer.edit([(2..3, "")], None, cx);
4641 assert_eq!(buffer.is_dirty(), true);
4642 });
4643 assert_eq!(
4644 mem::take(&mut *events.lock()),
4645 &[
4646 language::BufferEvent::Edited,
4647 language::BufferEvent::DirtyChanged
4648 ]
4649 );
4650
4651 // Buffer becomes clean again when all of its content is removed, because
4652 // the file was deleted.
4653 buffer2.update(cx, |buffer, cx| {
4654 buffer.edit([(0..2, "")], None, cx);
4655 assert_eq!(buffer.is_empty(), true);
4656 assert_eq!(buffer.is_dirty(), false);
4657 });
4658 assert_eq!(
4659 *events.lock(),
4660 &[
4661 language::BufferEvent::Edited,
4662 language::BufferEvent::DirtyChanged
4663 ]
4664 );
4665
4666 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4667 let events = Arc::new(Mutex::new(Vec::new()));
4668 let buffer3 = project
4669 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4670 .await
4671 .unwrap();
4672 buffer3.update(cx, |_, cx| {
4673 cx.subscribe(&buffer3, {
4674 let events = events.clone();
4675 move |_, _, event, _| match event {
4676 BufferEvent::Operation { .. } => {}
4677 _ => events.lock().push(event.clone()),
4678 }
4679 })
4680 .detach();
4681 });
4682
4683 buffer3.update(cx, |buffer, cx| {
4684 buffer.edit([(0..0, "x")], None, cx);
4685 });
4686 events.lock().clear();
4687 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4688 .await
4689 .unwrap();
4690 cx.executor().run_until_parked();
4691 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4692 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4693}
4694
// Verifies on-disk change handling: a clean buffer reloads (preserving
// anchors across the diff-based edit), while a dirty buffer keeps its
// contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record byte offsets whose anchors should survive reloads.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk while the buffer is unmodified, altering the
    // words around the marked offsets.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff instead of being invalidated.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4777
4778#[gpui::test]
4779async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4780 init_test(cx);
4781
4782 let fs = FakeFs::new(cx.executor());
4783 fs.insert_tree(
4784 path!("/dir"),
4785 json!({
4786 "file1": "a\nb\nc\n",
4787 "file2": "one\r\ntwo\r\nthree\r\n",
4788 }),
4789 )
4790 .await;
4791
4792 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4793 let buffer1 = project
4794 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4795 .await
4796 .unwrap();
4797 let buffer2 = project
4798 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4799 .await
4800 .unwrap();
4801
4802 buffer1.update(cx, |buffer, _| {
4803 assert_eq!(buffer.text(), "a\nb\nc\n");
4804 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4805 });
4806 buffer2.update(cx, |buffer, _| {
4807 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4808 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4809 });
4810
4811 // Change a file's line endings on disk from unix to windows. The buffer's
4812 // state updates correctly.
4813 fs.save(
4814 path!("/dir/file1").as_ref(),
4815 &"aaa\nb\nc\n".into(),
4816 LineEnding::Windows,
4817 )
4818 .await
4819 .unwrap();
4820 cx.executor().run_until_parked();
4821 buffer1.update(cx, |buffer, _| {
4822 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4823 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4824 });
4825
4826 // Save a file with windows line endings. The file is written correctly.
4827 buffer2.update(cx, |buffer, cx| {
4828 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4829 });
4830 project
4831 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4832 .await
4833 .unwrap();
4834 assert_eq!(
4835 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4836 "one\r\ntwo\r\nthree\r\nfour\r\n",
4837 );
4838}
4839
// Verifies that pushed LSP diagnostics connected via relatedInformation are
// grouped: hints that point back at a primary diagnostic share its group_id,
// and `diagnostic_group` returns every member of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    // One publish message carrying two logical groups:
    // - a warning ("error 1") plus one hint pointing back at it, and
    // - an error ("error 2") plus two hints at its related locations.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in position order; each hint carries the group_id of
    // the primary diagnostic it relates to (0 for "error 2", 1 for "error 1").
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5099
// Verifies that renaming an entry notifies LSP servers that registered
// file-operation filters: a `workspace/willRenameFiles` request is sent
// before the rename (its returned WorkspaceEdit is captured here), followed
// by a `workspace/didRenameFiles` notification afterwards.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of *.rs files and of any
    // folder, for both willRename and didRename.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a Rust buffer so the fake language server is started.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs; it is awaited only after the
    // willRenameFiles handler below has responded.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The WorkspaceEdit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // The willRenameFiles request must carry the entry's old and new URIs.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5235
// Verifies LSP-backed symbol renaming: `prepare_rename` surfaces the symbol's
// range from the server, and `perform_rename` applies the server's multi-file
// WorkspaceEdit, returning the affected buffers in a transaction.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the fake server reports
    // the symbol's range as bytes 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Rename `ONE` to `THREE`; the fake server returns edits spanning both
    // one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction contains both edited buffers; check that each
    // buffer's contents reflect the new name.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5375
// Verifies project-wide text search, and that results reflect unsaved edits
// in open buffers.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" matches its definition in two.rs and the reference in three.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer without saving; the next search must pick up the
    // in-memory contents of four.rs.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5452
// Verifies search inclusion patterns: results are restricted to files that
// match the inclusion PathMatcher, and inclusion patterns that match nothing
// simply contribute no files.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion pattern matching no files at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single inclusion pattern matching one extension.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching pattern alongside a matching one is harmless.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching patterns union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5576
// Verifies search exclusion patterns: files matching the exclusion
// PathMatcher are dropped from results, and exclusion patterns that match
// nothing have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion pattern matching no files at all.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Single exclusion pattern removing one extension.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching exclusion alongside a matching one is harmless.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file leave no results.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5700
5701#[gpui::test]
5702async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5703 init_test(cx);
5704
5705 let search_query = "file";
5706
5707 let fs = FakeFs::new(cx.executor());
5708 fs.insert_tree(
5709 path!("/dir"),
5710 json!({
5711 "one.rs": r#"// Rust file one"#,
5712 "one.ts": r#"// TypeScript file one"#,
5713 "two.rs": r#"// Rust file two"#,
5714 "two.ts": r#"// TypeScript file two"#,
5715 }),
5716 )
5717 .await;
5718
5719 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5720 let path_style = PathStyle::local();
5721 let _buffer = project.update(cx, |project, cx| {
5722 project.create_local_buffer("file", None, false, cx)
5723 });
5724
5725 assert_eq!(
5726 search(
5727 &project,
5728 SearchQuery::text(
5729 search_query,
5730 false,
5731 true,
5732 false,
5733 Default::default(),
5734 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5735 false,
5736 None,
5737 )
5738 .unwrap(),
5739 cx
5740 )
5741 .await
5742 .unwrap(),
5743 HashMap::from_iter([
5744 (path!("dir/one.rs").to_string(), vec![8..12]),
5745 (path!("dir/one.ts").to_string(), vec![14..18]),
5746 (path!("dir/two.rs").to_string(), vec![8..12]),
5747 (path!("dir/two.ts").to_string(), vec![14..18]),
5748 ]),
5749 "If no exclusions match, all files should be returned"
5750 );
5751
5752 assert_eq!(
5753 search(
5754 &project,
5755 SearchQuery::text(
5756 search_query,
5757 false,
5758 true,
5759 false,
5760 Default::default(),
5761 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5762 false,
5763 None,
5764 )
5765 .unwrap(),
5766 cx
5767 )
5768 .await
5769 .unwrap(),
5770 HashMap::from_iter([
5771 (path!("dir/one.ts").to_string(), vec![14..18]),
5772 (path!("dir/two.ts").to_string(), vec![14..18]),
5773 ]),
5774 "Rust exclusion search should give only TypeScript files"
5775 );
5776
5777 assert_eq!(
5778 search(
5779 &project,
5780 SearchQuery::text(
5781 search_query,
5782 false,
5783 true,
5784 false,
5785 Default::default(),
5786 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5787 false,
5788 None,
5789 )
5790 .unwrap(),
5791 cx
5792 )
5793 .await
5794 .unwrap(),
5795 HashMap::from_iter([
5796 (path!("dir/one.rs").to_string(), vec![8..12]),
5797 (path!("dir/two.rs").to_string(), vec![8..12]),
5798 ]),
5799 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5800 );
5801
5802 assert!(
5803 search(
5804 &project,
5805 SearchQuery::text(
5806 search_query,
5807 false,
5808 true,
5809 false,
5810 Default::default(),
5811 PathMatcher::new(
5812 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5813 PathStyle::local(),
5814 )
5815 .unwrap(),
5816 false,
5817 None,
5818 )
5819 .unwrap(),
5820 cx
5821 )
5822 .await
5823 .unwrap()
5824 .is_empty(),
5825 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5826 );
5827}
5828
5829#[gpui::test]
5830async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5831 init_test(cx);
5832
5833 let search_query = "file";
5834
5835 let fs = FakeFs::new(cx.executor());
5836 fs.insert_tree(
5837 path!("/dir"),
5838 json!({
5839 "one.rs": r#"// Rust file one"#,
5840 "one.ts": r#"// TypeScript file one"#,
5841 "two.rs": r#"// Rust file two"#,
5842 "two.ts": r#"// TypeScript file two"#,
5843 }),
5844 )
5845 .await;
5846 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5847 assert!(
5848 search(
5849 &project,
5850 SearchQuery::text(
5851 search_query,
5852 false,
5853 true,
5854 false,
5855 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5856 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5857 false,
5858 None,
5859 )
5860 .unwrap(),
5861 cx
5862 )
5863 .await
5864 .unwrap()
5865 .is_empty(),
5866 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5867 );
5868
5869 assert!(
5870 search(
5871 &project,
5872 SearchQuery::text(
5873 search_query,
5874 false,
5875 true,
5876 false,
5877 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5878 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5879 false,
5880 None,
5881 )
5882 .unwrap(),
5883 cx
5884 )
5885 .await
5886 .unwrap()
5887 .is_empty(),
5888 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5889 );
5890
5891 assert!(
5892 search(
5893 &project,
5894 SearchQuery::text(
5895 search_query,
5896 false,
5897 true,
5898 false,
5899 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5900 .unwrap(),
5901 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5902 .unwrap(),
5903 false,
5904 None,
5905 )
5906 .unwrap(),
5907 cx
5908 )
5909 .await
5910 .unwrap()
5911 .is_empty(),
5912 "Non-matching inclusions and exclusions should not change that."
5913 );
5914
5915 assert_eq!(
5916 search(
5917 &project,
5918 SearchQuery::text(
5919 search_query,
5920 false,
5921 true,
5922 false,
5923 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5924 .unwrap(),
5925 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5926 .unwrap(),
5927 false,
5928 None,
5929 )
5930 .unwrap(),
5931 cx
5932 )
5933 .await
5934 .unwrap(),
5935 HashMap::from_iter([
5936 (path!("dir/one.ts").to_string(), vec![14..18]),
5937 (path!("dir/two.ts").to_string(), vec![14..18]),
5938 ]),
5939 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5940 );
5941}
5942
// Verifies inclusion patterns across multiple worktrees: a pattern prefixed
// with a worktree name restricts results to that worktree, while a bare
// extension pattern matches files in every worktree.
//
// NOTE(review): the 7th `SearchQuery::text` argument is `true` exactly for the
// two queries whose patterns include the worktree-name prefix — presumably it
// makes the matcher operate on worktree-relative full paths; confirm against
// the `SearchQuery::text` definition.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Inclusion scoped to worktree-a only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Inclusion scoped to worktree-b only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A bare extension pattern matches in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6041
// Verifies the `include_ignored` flag (4th `SearchQuery::text` argument):
// by default gitignored directories ("target", "node_modules") are skipped;
// with the flag set, ignored files are searched too, and inclusion/exclusion
// matchers still apply on top of that.
//
// NOTE(review): a fresh `Project::test` is created before each query —
// presumably so worktree scan state from the previous search doesn't affect
// the next one; confirm whether reuse would actually matter.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored dirs are skipped, only the root package.json hits.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // `include_ignored = true`: every file containing "key" is found,
    // including those under target/ and node_modules/.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // `include_ignored = true` combined with inclusion (prettier subtree only)
    // and exclusion ("*.ts") matchers.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6166
// Verifies text search over non-ASCII (Cyrillic) content. Match ranges are
// byte offsets: "привет" is 6 chars × 2 bytes in UTF-8, hence the 12-byte
// spans below. Also checks which `SearchQuery` variant is constructed: the
// case-sensitive query stays `Text`, while the case-insensitive Unicode query
// is built as `Regex` (asserted via `assert_matches!`).
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive: only lowercase "привет" occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: matches both "ПРИВЕТ" and "привет"; note the query is
    // lowered to a regex variant for Unicode case folding.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A query containing '.' still matches literally (13-byte span: 12 bytes
    // of Cyrillic + 1 byte for '.') — only two.rs ends with "ПРИВЕТ.".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6249
// Verifies `Project::create_entry` creates a file relative to the worktree
// root — including a name ending in dots ("b..") — and that the new entry is
// reported as included in the worktree. The final assertion snapshots the
// whole fake filesystem to prove exactly one new path appeared.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three, so "b.." resolves under it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // Full filesystem listing: everything that existed before, plus the new
    // /one/two/three/b.. entry.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
6292
// Verifies hover aggregation across multiple language servers attached to the
// same buffer: servers that return hover content contribute results, a server
// returning `Ok(None)` contributes nothing, and a server without hover
// capability must never even receive the request (its handler panics).
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: three with hover capability
    // (one of which answers None), one with no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers startup of all registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Await each server's startup and install a hover handler appropriate to
    // its role; keep the handler streams so we can await the requests later.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with "<name> hover" content.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Receives the request but has no hover content to offer.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be queried: it advertised no hover capability.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Fire the hover, then wait until every capable server has seen a request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that answered with content appear in the result.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6447
// Verifies that hover content consisting only of empty/whitespace-only parts
// ("", "   ", "\n\n\n") is filtered out entirely, yielding an empty hover
// result rather than blank popup blocks.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts, all of them blank.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("   ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the server before checking results.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6521
// Verifies that `Project::code_actions` honors the requested `kinds` filter:
// the server offers two actions (organize-imports and fix-all), but asking
// only for SOURCE_ORGANIZE_IMPORTS returns exactly that one.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Server always offers both action kinds; filtering happens client-side
    // (or via the request) in `code_actions`.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only the organize-imports kind over the whole buffer range.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request reached the server before inspecting the result.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6600
6601#[gpui::test]
6602async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6603 init_test(cx);
6604
6605 let fs = FakeFs::new(cx.executor());
6606 fs.insert_tree(
6607 path!("/dir"),
6608 json!({
6609 "a.tsx": "a",
6610 }),
6611 )
6612 .await;
6613
6614 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6615
6616 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6617 language_registry.add(tsx_lang());
6618 let language_server_names = [
6619 "TypeScriptServer",
6620 "TailwindServer",
6621 "ESLintServer",
6622 "NoActionsCapabilitiesServer",
6623 ];
6624
6625 let mut language_server_rxs = [
6626 language_registry.register_fake_lsp(
6627 "tsx",
6628 FakeLspAdapter {
6629 name: language_server_names[0],
6630 capabilities: lsp::ServerCapabilities {
6631 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6632 ..lsp::ServerCapabilities::default()
6633 },
6634 ..FakeLspAdapter::default()
6635 },
6636 ),
6637 language_registry.register_fake_lsp(
6638 "tsx",
6639 FakeLspAdapter {
6640 name: language_server_names[1],
6641 capabilities: lsp::ServerCapabilities {
6642 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6643 ..lsp::ServerCapabilities::default()
6644 },
6645 ..FakeLspAdapter::default()
6646 },
6647 ),
6648 language_registry.register_fake_lsp(
6649 "tsx",
6650 FakeLspAdapter {
6651 name: language_server_names[2],
6652 capabilities: lsp::ServerCapabilities {
6653 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6654 ..lsp::ServerCapabilities::default()
6655 },
6656 ..FakeLspAdapter::default()
6657 },
6658 ),
6659 language_registry.register_fake_lsp(
6660 "tsx",
6661 FakeLspAdapter {
6662 name: language_server_names[3],
6663 capabilities: lsp::ServerCapabilities {
6664 code_action_provider: None,
6665 ..lsp::ServerCapabilities::default()
6666 },
6667 ..FakeLspAdapter::default()
6668 },
6669 ),
6670 ];
6671
6672 let (buffer, _handle) = project
6673 .update(cx, |p, cx| {
6674 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6675 })
6676 .await
6677 .unwrap();
6678 cx.executor().run_until_parked();
6679
6680 let mut servers_with_actions_requests = HashMap::default();
6681 for i in 0..language_server_names.len() {
6682 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6683 panic!(
6684 "Failed to get language server #{i} with name {}",
6685 &language_server_names[i]
6686 )
6687 });
6688 let new_server_name = new_server.server.name();
6689
6690 assert!(
6691 !servers_with_actions_requests.contains_key(&new_server_name),
6692 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6693 );
6694 match new_server_name.0.as_ref() {
6695 "TailwindServer" | "TypeScriptServer" => {
6696 servers_with_actions_requests.insert(
6697 new_server_name.clone(),
6698 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6699 move |_, _| {
6700 let name = new_server_name.clone();
6701 async move {
6702 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6703 lsp::CodeAction {
6704 title: format!("{name} code action"),
6705 ..lsp::CodeAction::default()
6706 },
6707 )]))
6708 }
6709 },
6710 ),
6711 );
6712 }
6713 "ESLintServer" => {
6714 servers_with_actions_requests.insert(
6715 new_server_name,
6716 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6717 |_, _| async move { Ok(None) },
6718 ),
6719 );
6720 }
6721 "NoActionsCapabilitiesServer" => {
6722 let _never_handled = new_server
6723 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6724 panic!(
6725 "Should not call for code actions server with no corresponding capabilities"
6726 )
6727 });
6728 }
6729 unexpected => panic!("Unexpected server name: {unexpected}"),
6730 }
6731 }
6732
6733 let code_actions_task = project.update(cx, |project, cx| {
6734 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6735 });
6736
6737 // cx.run_until_parked();
6738 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6739 |mut code_actions_request| async move {
6740 code_actions_request
6741 .next()
6742 .await
6743 .expect("All code actions requests should have been triggered")
6744 },
6745 ))
6746 .await;
6747 assert_eq!(
6748 vec!["TailwindServer code action", "TypeScriptServer code action"],
6749 code_actions_task
6750 .await
6751 .unwrap()
6752 .unwrap()
6753 .into_iter()
6754 .map(|code_action| code_action.lsp_action.title().to_owned())
6755 .sorted()
6756 .collect::<Vec<_>>(),
6757 "Should receive code actions responses from all related servers with hover capabilities"
6758 );
6759}
6760
6761#[gpui::test]
6762async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6763 init_test(cx);
6764
6765 let fs = FakeFs::new(cx.executor());
6766 fs.insert_tree(
6767 "/dir",
6768 json!({
6769 "a.rs": "let a = 1;",
6770 "b.rs": "let b = 2;",
6771 "c.rs": "let c = 2;",
6772 }),
6773 )
6774 .await;
6775
6776 let project = Project::test(
6777 fs,
6778 [
6779 "/dir/a.rs".as_ref(),
6780 "/dir/b.rs".as_ref(),
6781 "/dir/c.rs".as_ref(),
6782 ],
6783 cx,
6784 )
6785 .await;
6786
6787 // check the initial state and get the worktrees
6788 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6789 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6790 assert_eq!(worktrees.len(), 3);
6791
6792 let worktree_a = worktrees[0].read(cx);
6793 let worktree_b = worktrees[1].read(cx);
6794 let worktree_c = worktrees[2].read(cx);
6795
6796 // check they start in the right order
6797 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6798 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6799 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6800
6801 (
6802 worktrees[0].clone(),
6803 worktrees[1].clone(),
6804 worktrees[2].clone(),
6805 )
6806 });
6807
6808 // move first worktree to after the second
6809 // [a, b, c] -> [b, a, c]
6810 project
6811 .update(cx, |project, cx| {
6812 let first = worktree_a.read(cx);
6813 let second = worktree_b.read(cx);
6814 project.move_worktree(first.id(), second.id(), cx)
6815 })
6816 .expect("moving first after second");
6817
6818 // check the state after moving
6819 project.update(cx, |project, cx| {
6820 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6821 assert_eq!(worktrees.len(), 3);
6822
6823 let first = worktrees[0].read(cx);
6824 let second = worktrees[1].read(cx);
6825 let third = worktrees[2].read(cx);
6826
6827 // check they are now in the right order
6828 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6829 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6830 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6831 });
6832
6833 // move the second worktree to before the first
6834 // [b, a, c] -> [a, b, c]
6835 project
6836 .update(cx, |project, cx| {
6837 let second = worktree_a.read(cx);
6838 let first = worktree_b.read(cx);
6839 project.move_worktree(first.id(), second.id(), cx)
6840 })
6841 .expect("moving second before first");
6842
6843 // check the state after moving
6844 project.update(cx, |project, cx| {
6845 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6846 assert_eq!(worktrees.len(), 3);
6847
6848 let first = worktrees[0].read(cx);
6849 let second = worktrees[1].read(cx);
6850 let third = worktrees[2].read(cx);
6851
6852 // check they are now in the right order
6853 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6854 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6855 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6856 });
6857
6858 // move the second worktree to after the third
6859 // [a, b, c] -> [a, c, b]
6860 project
6861 .update(cx, |project, cx| {
6862 let second = worktree_b.read(cx);
6863 let third = worktree_c.read(cx);
6864 project.move_worktree(second.id(), third.id(), cx)
6865 })
6866 .expect("moving second after third");
6867
6868 // check the state after moving
6869 project.update(cx, |project, cx| {
6870 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6871 assert_eq!(worktrees.len(), 3);
6872
6873 let first = worktrees[0].read(cx);
6874 let second = worktrees[1].read(cx);
6875 let third = worktrees[2].read(cx);
6876
6877 // check they are now in the right order
6878 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6879 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6880 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6881 });
6882
6883 // move the third worktree to before the second
6884 // [a, c, b] -> [a, b, c]
6885 project
6886 .update(cx, |project, cx| {
6887 let third = worktree_c.read(cx);
6888 let second = worktree_b.read(cx);
6889 project.move_worktree(third.id(), second.id(), cx)
6890 })
6891 .expect("moving third before second");
6892
6893 // check the state after moving
6894 project.update(cx, |project, cx| {
6895 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6896 assert_eq!(worktrees.len(), 3);
6897
6898 let first = worktrees[0].read(cx);
6899 let second = worktrees[1].read(cx);
6900 let third = worktrees[2].read(cx);
6901
6902 // check they are now in the right order
6903 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6904 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6905 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6906 });
6907
6908 // move the first worktree to after the third
6909 // [a, b, c] -> [b, c, a]
6910 project
6911 .update(cx, |project, cx| {
6912 let first = worktree_a.read(cx);
6913 let third = worktree_c.read(cx);
6914 project.move_worktree(first.id(), third.id(), cx)
6915 })
6916 .expect("moving first after third");
6917
6918 // check the state after moving
6919 project.update(cx, |project, cx| {
6920 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6921 assert_eq!(worktrees.len(), 3);
6922
6923 let first = worktrees[0].read(cx);
6924 let second = worktrees[1].read(cx);
6925 let third = worktrees[2].read(cx);
6926
6927 // check they are now in the right order
6928 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6929 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6930 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6931 });
6932
6933 // move the third worktree to before the first
6934 // [b, c, a] -> [a, b, c]
6935 project
6936 .update(cx, |project, cx| {
6937 let third = worktree_a.read(cx);
6938 let first = worktree_b.read(cx);
6939 project.move_worktree(third.id(), first.id(), cx)
6940 })
6941 .expect("moving third before first");
6942
6943 // check the state after moving
6944 project.update(cx, |project, cx| {
6945 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6946 assert_eq!(worktrees.len(), 3);
6947
6948 let first = worktrees[0].read(cx);
6949 let second = worktrees[1].read(cx);
6950 let third = worktrees[2].read(cx);
6951
6952 // check they are now in the right order
6953 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6954 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6955 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6956 });
6957}
6958
6959#[gpui::test]
6960async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6961 init_test(cx);
6962
6963 let staged_contents = r#"
6964 fn main() {
6965 println!("hello world");
6966 }
6967 "#
6968 .unindent();
6969 let file_contents = r#"
6970 // print goodbye
6971 fn main() {
6972 println!("goodbye world");
6973 }
6974 "#
6975 .unindent();
6976
6977 let fs = FakeFs::new(cx.background_executor.clone());
6978 fs.insert_tree(
6979 "/dir",
6980 json!({
6981 ".git": {},
6982 "src": {
6983 "main.rs": file_contents,
6984 }
6985 }),
6986 )
6987 .await;
6988
6989 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
6990
6991 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6992
6993 let buffer = project
6994 .update(cx, |project, cx| {
6995 project.open_local_buffer("/dir/src/main.rs", cx)
6996 })
6997 .await
6998 .unwrap();
6999 let unstaged_diff = project
7000 .update(cx, |project, cx| {
7001 project.open_unstaged_diff(buffer.clone(), cx)
7002 })
7003 .await
7004 .unwrap();
7005
7006 cx.run_until_parked();
7007 unstaged_diff.update(cx, |unstaged_diff, cx| {
7008 let snapshot = buffer.read(cx).snapshot();
7009 assert_hunks(
7010 unstaged_diff.hunks(&snapshot, cx),
7011 &snapshot,
7012 &unstaged_diff.base_text_string().unwrap(),
7013 &[
7014 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7015 (
7016 2..3,
7017 " println!(\"hello world\");\n",
7018 " println!(\"goodbye world\");\n",
7019 DiffHunkStatus::modified_none(),
7020 ),
7021 ],
7022 );
7023 });
7024
7025 let staged_contents = r#"
7026 // print goodbye
7027 fn main() {
7028 }
7029 "#
7030 .unindent();
7031
7032 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7033
7034 cx.run_until_parked();
7035 unstaged_diff.update(cx, |unstaged_diff, cx| {
7036 let snapshot = buffer.read(cx).snapshot();
7037 assert_hunks(
7038 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7039 &snapshot,
7040 &unstaged_diff.base_text().text(),
7041 &[(
7042 2..3,
7043 "",
7044 " println!(\"goodbye world\");\n",
7045 DiffHunkStatus::added_none(),
7046 )],
7047 );
7048 });
7049}
7050
7051#[gpui::test]
7052async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7053 init_test(cx);
7054
7055 let committed_contents = r#"
7056 fn main() {
7057 println!("hello world");
7058 }
7059 "#
7060 .unindent();
7061 let staged_contents = r#"
7062 fn main() {
7063 println!("goodbye world");
7064 }
7065 "#
7066 .unindent();
7067 let file_contents = r#"
7068 // print goodbye
7069 fn main() {
7070 println!("goodbye world");
7071 }
7072 "#
7073 .unindent();
7074
7075 let fs = FakeFs::new(cx.background_executor.clone());
7076 fs.insert_tree(
7077 "/dir",
7078 json!({
7079 ".git": {},
7080 "src": {
7081 "modification.rs": file_contents,
7082 }
7083 }),
7084 )
7085 .await;
7086
7087 fs.set_head_for_repo(
7088 Path::new("/dir/.git"),
7089 &[
7090 ("src/modification.rs", committed_contents),
7091 ("src/deletion.rs", "// the-deleted-contents\n".into()),
7092 ],
7093 "deadbeef",
7094 );
7095 fs.set_index_for_repo(
7096 Path::new("/dir/.git"),
7097 &[
7098 ("src/modification.rs", staged_contents),
7099 ("src/deletion.rs", "// the-deleted-contents\n".into()),
7100 ],
7101 );
7102
7103 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7104 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7105 let language = rust_lang();
7106 language_registry.add(language.clone());
7107
7108 let buffer_1 = project
7109 .update(cx, |project, cx| {
7110 project.open_local_buffer("/dir/src/modification.rs", cx)
7111 })
7112 .await
7113 .unwrap();
7114 let diff_1 = project
7115 .update(cx, |project, cx| {
7116 project.open_uncommitted_diff(buffer_1.clone(), cx)
7117 })
7118 .await
7119 .unwrap();
7120 diff_1.read_with(cx, |diff, _| {
7121 assert_eq!(diff.base_text().language().cloned(), Some(language))
7122 });
7123 cx.run_until_parked();
7124 diff_1.update(cx, |diff, cx| {
7125 let snapshot = buffer_1.read(cx).snapshot();
7126 assert_hunks(
7127 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7128 &snapshot,
7129 &diff.base_text_string().unwrap(),
7130 &[
7131 (
7132 0..1,
7133 "",
7134 "// print goodbye\n",
7135 DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
7136 ),
7137 (
7138 2..3,
7139 " println!(\"hello world\");\n",
7140 " println!(\"goodbye world\");\n",
7141 DiffHunkStatus::modified_none(),
7142 ),
7143 ],
7144 );
7145 });
7146
7147 // Reset HEAD to a version that differs from both the buffer and the index.
7148 let committed_contents = r#"
7149 // print goodbye
7150 fn main() {
7151 }
7152 "#
7153 .unindent();
7154 fs.set_head_for_repo(
7155 Path::new("/dir/.git"),
7156 &[
7157 ("src/modification.rs", committed_contents.clone()),
7158 ("src/deletion.rs", "// the-deleted-contents\n".into()),
7159 ],
7160 "deadbeef",
7161 );
7162
7163 // Buffer now has an unstaged hunk.
7164 cx.run_until_parked();
7165 diff_1.update(cx, |diff, cx| {
7166 let snapshot = buffer_1.read(cx).snapshot();
7167 assert_hunks(
7168 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7169 &snapshot,
7170 &diff.base_text().text(),
7171 &[(
7172 2..3,
7173 "",
7174 " println!(\"goodbye world\");\n",
7175 DiffHunkStatus::added_none(),
7176 )],
7177 );
7178 });
7179
7180 // Open a buffer for a file that's been deleted.
7181 let buffer_2 = project
7182 .update(cx, |project, cx| {
7183 project.open_local_buffer("/dir/src/deletion.rs", cx)
7184 })
7185 .await
7186 .unwrap();
7187 let diff_2 = project
7188 .update(cx, |project, cx| {
7189 project.open_uncommitted_diff(buffer_2.clone(), cx)
7190 })
7191 .await
7192 .unwrap();
7193 cx.run_until_parked();
7194 diff_2.update(cx, |diff, cx| {
7195 let snapshot = buffer_2.read(cx).snapshot();
7196 assert_hunks(
7197 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7198 &snapshot,
7199 &diff.base_text_string().unwrap(),
7200 &[(
7201 0..0,
7202 "// the-deleted-contents\n",
7203 "",
7204 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
7205 )],
7206 );
7207 });
7208
7209 // Stage the deletion of this file
7210 fs.set_index_for_repo(
7211 Path::new("/dir/.git"),
7212 &[("src/modification.rs", committed_contents.clone())],
7213 );
7214 cx.run_until_parked();
7215 diff_2.update(cx, |diff, cx| {
7216 let snapshot = buffer_2.read(cx).snapshot();
7217 assert_hunks(
7218 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7219 &snapshot,
7220 &diff.base_text_string().unwrap(),
7221 &[(
7222 0..0,
7223 "// the-deleted-contents\n",
7224 "",
7225 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
7226 )],
7227 );
7228 });
7229}
7230
7231#[gpui::test]
7232async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7233 use DiffHunkSecondaryStatus::*;
7234 init_test(cx);
7235
7236 let committed_contents = r#"
7237 zero
7238 one
7239 two
7240 three
7241 four
7242 five
7243 "#
7244 .unindent();
7245 let file_contents = r#"
7246 one
7247 TWO
7248 three
7249 FOUR
7250 five
7251 "#
7252 .unindent();
7253
7254 let fs = FakeFs::new(cx.background_executor.clone());
7255 fs.insert_tree(
7256 "/dir",
7257 json!({
7258 ".git": {},
7259 "file.txt": file_contents.clone()
7260 }),
7261 )
7262 .await;
7263
7264 fs.set_head_and_index_for_repo(
7265 path!("/dir/.git").as_ref(),
7266 &[("file.txt", committed_contents.clone())],
7267 );
7268
7269 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7270
7271 let buffer = project
7272 .update(cx, |project, cx| {
7273 project.open_local_buffer("/dir/file.txt", cx)
7274 })
7275 .await
7276 .unwrap();
7277 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7278 let uncommitted_diff = project
7279 .update(cx, |project, cx| {
7280 project.open_uncommitted_diff(buffer.clone(), cx)
7281 })
7282 .await
7283 .unwrap();
7284 let mut diff_events = cx.events(&uncommitted_diff);
7285
7286 // The hunks are initially unstaged.
7287 uncommitted_diff.read_with(cx, |diff, cx| {
7288 assert_hunks(
7289 diff.hunks(&snapshot, cx),
7290 &snapshot,
7291 &diff.base_text_string().unwrap(),
7292 &[
7293 (
7294 0..0,
7295 "zero\n",
7296 "",
7297 DiffHunkStatus::deleted(HasSecondaryHunk),
7298 ),
7299 (
7300 1..2,
7301 "two\n",
7302 "TWO\n",
7303 DiffHunkStatus::modified(HasSecondaryHunk),
7304 ),
7305 (
7306 3..4,
7307 "four\n",
7308 "FOUR\n",
7309 DiffHunkStatus::modified(HasSecondaryHunk),
7310 ),
7311 ],
7312 );
7313 });
7314
7315 // Stage a hunk. It appears as optimistically staged.
7316 uncommitted_diff.update(cx, |diff, cx| {
7317 let range =
7318 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7319 let hunks = diff
7320 .hunks_intersecting_range(range, &snapshot, cx)
7321 .collect::<Vec<_>>();
7322 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7323
7324 assert_hunks(
7325 diff.hunks(&snapshot, cx),
7326 &snapshot,
7327 &diff.base_text_string().unwrap(),
7328 &[
7329 (
7330 0..0,
7331 "zero\n",
7332 "",
7333 DiffHunkStatus::deleted(HasSecondaryHunk),
7334 ),
7335 (
7336 1..2,
7337 "two\n",
7338 "TWO\n",
7339 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7340 ),
7341 (
7342 3..4,
7343 "four\n",
7344 "FOUR\n",
7345 DiffHunkStatus::modified(HasSecondaryHunk),
7346 ),
7347 ],
7348 );
7349 });
7350
7351 // The diff emits a change event for the range of the staged hunk.
7352 assert!(matches!(
7353 diff_events.next().await.unwrap(),
7354 BufferDiffEvent::HunksStagedOrUnstaged(_)
7355 ));
7356 let event = diff_events.next().await.unwrap();
7357 if let BufferDiffEvent::DiffChanged {
7358 changed_range: Some(changed_range),
7359 } = event
7360 {
7361 let changed_range = changed_range.to_point(&snapshot);
7362 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7363 } else {
7364 panic!("Unexpected event {event:?}");
7365 }
7366
7367 // When the write to the index completes, it appears as staged.
7368 cx.run_until_parked();
7369 uncommitted_diff.update(cx, |diff, cx| {
7370 assert_hunks(
7371 diff.hunks(&snapshot, cx),
7372 &snapshot,
7373 &diff.base_text_string().unwrap(),
7374 &[
7375 (
7376 0..0,
7377 "zero\n",
7378 "",
7379 DiffHunkStatus::deleted(HasSecondaryHunk),
7380 ),
7381 (
7382 1..2,
7383 "two\n",
7384 "TWO\n",
7385 DiffHunkStatus::modified(NoSecondaryHunk),
7386 ),
7387 (
7388 3..4,
7389 "four\n",
7390 "FOUR\n",
7391 DiffHunkStatus::modified(HasSecondaryHunk),
7392 ),
7393 ],
7394 );
7395 });
7396
7397 // The diff emits a change event for the changed index text.
7398 let event = diff_events.next().await.unwrap();
7399 if let BufferDiffEvent::DiffChanged {
7400 changed_range: Some(changed_range),
7401 } = event
7402 {
7403 let changed_range = changed_range.to_point(&snapshot);
7404 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7405 } else {
7406 panic!("Unexpected event {event:?}");
7407 }
7408
7409 // Simulate a problem writing to the git index.
7410 fs.set_error_message_for_index_write(
7411 "/dir/.git".as_ref(),
7412 Some("failed to write git index".into()),
7413 );
7414
7415 // Stage another hunk.
7416 uncommitted_diff.update(cx, |diff, cx| {
7417 let range =
7418 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7419 let hunks = diff
7420 .hunks_intersecting_range(range, &snapshot, cx)
7421 .collect::<Vec<_>>();
7422 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7423
7424 assert_hunks(
7425 diff.hunks(&snapshot, cx),
7426 &snapshot,
7427 &diff.base_text_string().unwrap(),
7428 &[
7429 (
7430 0..0,
7431 "zero\n",
7432 "",
7433 DiffHunkStatus::deleted(HasSecondaryHunk),
7434 ),
7435 (
7436 1..2,
7437 "two\n",
7438 "TWO\n",
7439 DiffHunkStatus::modified(NoSecondaryHunk),
7440 ),
7441 (
7442 3..4,
7443 "four\n",
7444 "FOUR\n",
7445 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7446 ),
7447 ],
7448 );
7449 });
7450 assert!(matches!(
7451 diff_events.next().await.unwrap(),
7452 BufferDiffEvent::HunksStagedOrUnstaged(_)
7453 ));
7454 let event = diff_events.next().await.unwrap();
7455 if let BufferDiffEvent::DiffChanged {
7456 changed_range: Some(changed_range),
7457 } = event
7458 {
7459 let changed_range = changed_range.to_point(&snapshot);
7460 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7461 } else {
7462 panic!("Unexpected event {event:?}");
7463 }
7464
7465 // When the write fails, the hunk returns to being unstaged.
7466 cx.run_until_parked();
7467 uncommitted_diff.update(cx, |diff, cx| {
7468 assert_hunks(
7469 diff.hunks(&snapshot, cx),
7470 &snapshot,
7471 &diff.base_text_string().unwrap(),
7472 &[
7473 (
7474 0..0,
7475 "zero\n",
7476 "",
7477 DiffHunkStatus::deleted(HasSecondaryHunk),
7478 ),
7479 (
7480 1..2,
7481 "two\n",
7482 "TWO\n",
7483 DiffHunkStatus::modified(NoSecondaryHunk),
7484 ),
7485 (
7486 3..4,
7487 "four\n",
7488 "FOUR\n",
7489 DiffHunkStatus::modified(HasSecondaryHunk),
7490 ),
7491 ],
7492 );
7493 });
7494
7495 let event = diff_events.next().await.unwrap();
7496 if let BufferDiffEvent::DiffChanged {
7497 changed_range: Some(changed_range),
7498 } = event
7499 {
7500 let changed_range = changed_range.to_point(&snapshot);
7501 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7502 } else {
7503 panic!("Unexpected event {event:?}");
7504 }
7505
7506 // Allow writing to the git index to succeed again.
7507 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7508
7509 // Stage two hunks with separate operations.
7510 uncommitted_diff.update(cx, |diff, cx| {
7511 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7512 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7513 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7514 });
7515
7516 // Both staged hunks appear as pending.
7517 uncommitted_diff.update(cx, |diff, cx| {
7518 assert_hunks(
7519 diff.hunks(&snapshot, cx),
7520 &snapshot,
7521 &diff.base_text_string().unwrap(),
7522 &[
7523 (
7524 0..0,
7525 "zero\n",
7526 "",
7527 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7528 ),
7529 (
7530 1..2,
7531 "two\n",
7532 "TWO\n",
7533 DiffHunkStatus::modified(NoSecondaryHunk),
7534 ),
7535 (
7536 3..4,
7537 "four\n",
7538 "FOUR\n",
7539 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7540 ),
7541 ],
7542 );
7543 });
7544
7545 // Both staging operations take effect.
7546 cx.run_until_parked();
7547 uncommitted_diff.update(cx, |diff, cx| {
7548 assert_hunks(
7549 diff.hunks(&snapshot, cx),
7550 &snapshot,
7551 &diff.base_text_string().unwrap(),
7552 &[
7553 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7554 (
7555 1..2,
7556 "two\n",
7557 "TWO\n",
7558 DiffHunkStatus::modified(NoSecondaryHunk),
7559 ),
7560 (
7561 3..4,
7562 "four\n",
7563 "FOUR\n",
7564 DiffHunkStatus::modified(NoSecondaryHunk),
7565 ),
7566 ],
7567 );
7568 });
7569}
7570
7571#[gpui::test(seeds(340, 472))]
7572async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
7573 use DiffHunkSecondaryStatus::*;
7574 init_test(cx);
7575
7576 let committed_contents = r#"
7577 zero
7578 one
7579 two
7580 three
7581 four
7582 five
7583 "#
7584 .unindent();
7585 let file_contents = r#"
7586 one
7587 TWO
7588 three
7589 FOUR
7590 five
7591 "#
7592 .unindent();
7593
7594 let fs = FakeFs::new(cx.background_executor.clone());
7595 fs.insert_tree(
7596 "/dir",
7597 json!({
7598 ".git": {},
7599 "file.txt": file_contents.clone()
7600 }),
7601 )
7602 .await;
7603
7604 fs.set_head_for_repo(
7605 "/dir/.git".as_ref(),
7606 &[("file.txt", committed_contents.clone())],
7607 "deadbeef",
7608 );
7609 fs.set_index_for_repo(
7610 "/dir/.git".as_ref(),
7611 &[("file.txt", committed_contents.clone())],
7612 );
7613
7614 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7615
7616 let buffer = project
7617 .update(cx, |project, cx| {
7618 project.open_local_buffer("/dir/file.txt", cx)
7619 })
7620 .await
7621 .unwrap();
7622 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7623 let uncommitted_diff = project
7624 .update(cx, |project, cx| {
7625 project.open_uncommitted_diff(buffer.clone(), cx)
7626 })
7627 .await
7628 .unwrap();
7629
7630 // The hunks are initially unstaged.
7631 uncommitted_diff.read_with(cx, |diff, cx| {
7632 assert_hunks(
7633 diff.hunks(&snapshot, cx),
7634 &snapshot,
7635 &diff.base_text_string().unwrap(),
7636 &[
7637 (
7638 0..0,
7639 "zero\n",
7640 "",
7641 DiffHunkStatus::deleted(HasSecondaryHunk),
7642 ),
7643 (
7644 1..2,
7645 "two\n",
7646 "TWO\n",
7647 DiffHunkStatus::modified(HasSecondaryHunk),
7648 ),
7649 (
7650 3..4,
7651 "four\n",
7652 "FOUR\n",
7653 DiffHunkStatus::modified(HasSecondaryHunk),
7654 ),
7655 ],
7656 );
7657 });
7658
7659 // Pause IO events
7660 fs.pause_events();
7661
7662 // Stage the first hunk.
7663 uncommitted_diff.update(cx, |diff, cx| {
7664 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
7665 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7666 assert_hunks(
7667 diff.hunks(&snapshot, cx),
7668 &snapshot,
7669 &diff.base_text_string().unwrap(),
7670 &[
7671 (
7672 0..0,
7673 "zero\n",
7674 "",
7675 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7676 ),
7677 (
7678 1..2,
7679 "two\n",
7680 "TWO\n",
7681 DiffHunkStatus::modified(HasSecondaryHunk),
7682 ),
7683 (
7684 3..4,
7685 "four\n",
7686 "FOUR\n",
7687 DiffHunkStatus::modified(HasSecondaryHunk),
7688 ),
7689 ],
7690 );
7691 });
7692
7693 // Stage the second hunk *before* receiving the FS event for the first hunk.
7694 cx.run_until_parked();
7695 uncommitted_diff.update(cx, |diff, cx| {
7696 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7697 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7698 assert_hunks(
7699 diff.hunks(&snapshot, cx),
7700 &snapshot,
7701 &diff.base_text_string().unwrap(),
7702 &[
7703 (
7704 0..0,
7705 "zero\n",
7706 "",
7707 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7708 ),
7709 (
7710 1..2,
7711 "two\n",
7712 "TWO\n",
7713 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7714 ),
7715 (
7716 3..4,
7717 "four\n",
7718 "FOUR\n",
7719 DiffHunkStatus::modified(HasSecondaryHunk),
7720 ),
7721 ],
7722 );
7723 });
7724
7725 // Process the FS event for staging the first hunk (second event is still pending).
7726 fs.flush_events(1);
7727 cx.run_until_parked();
7728
7729 // Stage the third hunk before receiving the second FS event.
7730 uncommitted_diff.update(cx, |diff, cx| {
7731 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7732 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7733 });
7734
7735 // Wait for all remaining IO.
7736 cx.run_until_parked();
7737 fs.flush_events(fs.buffered_event_count());
7738
7739 // Now all hunks are staged.
7740 cx.run_until_parked();
7741 uncommitted_diff.update(cx, |diff, cx| {
7742 assert_hunks(
7743 diff.hunks(&snapshot, cx),
7744 &snapshot,
7745 &diff.base_text_string().unwrap(),
7746 &[
7747 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7748 (
7749 1..2,
7750 "two\n",
7751 "TWO\n",
7752 DiffHunkStatus::modified(NoSecondaryHunk),
7753 ),
7754 (
7755 3..4,
7756 "four\n",
7757 "FOUR\n",
7758 DiffHunkStatus::modified(NoSecondaryHunk),
7759 ),
7760 ],
7761 );
7762 });
7763}
7764
// Randomized stress test: stage and unstage hunks in random order, with
// random executor delays interleaved, and verify that once everything
// settles the diff's hunk statuses match the locally tracked expectations.
// Runs for 25 seeded iterations to shake out races between index writes and
// diff recalculation.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS=N`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; every 5th buffer line is modified,
    // so the buffer diff has exactly 6 hunks (lines 0, 5, 10, 15, 20, 25).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Flip a random hunk each iteration, recording the pending status we
    // expect it to hold while the index write is in flight.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to vary the interleaving of index
        // writes and diff recalculations across iterations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once quiescent, every pending transition should have resolved to its
    // terminal status.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    // Compare (row, status) pairs between the tracked model and the diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7887
7888#[gpui::test]
7889async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7890 init_test(cx);
7891
7892 let committed_contents = r#"
7893 fn main() {
7894 println!("hello from HEAD");
7895 }
7896 "#
7897 .unindent();
7898 let file_contents = r#"
7899 fn main() {
7900 println!("hello from the working copy");
7901 }
7902 "#
7903 .unindent();
7904
7905 let fs = FakeFs::new(cx.background_executor.clone());
7906 fs.insert_tree(
7907 "/dir",
7908 json!({
7909 ".git": {},
7910 "src": {
7911 "main.rs": file_contents,
7912 }
7913 }),
7914 )
7915 .await;
7916
7917 fs.set_head_for_repo(
7918 Path::new("/dir/.git"),
7919 &[("src/main.rs", committed_contents.clone())],
7920 "deadbeef",
7921 );
7922 fs.set_index_for_repo(
7923 Path::new("/dir/.git"),
7924 &[("src/main.rs", committed_contents.clone())],
7925 );
7926
7927 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7928
7929 let buffer = project
7930 .update(cx, |project, cx| {
7931 project.open_local_buffer("/dir/src/main.rs", cx)
7932 })
7933 .await
7934 .unwrap();
7935 let uncommitted_diff = project
7936 .update(cx, |project, cx| {
7937 project.open_uncommitted_diff(buffer.clone(), cx)
7938 })
7939 .await
7940 .unwrap();
7941
7942 cx.run_until_parked();
7943 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7944 let snapshot = buffer.read(cx).snapshot();
7945 assert_hunks(
7946 uncommitted_diff.hunks(&snapshot, cx),
7947 &snapshot,
7948 &uncommitted_diff.base_text_string().unwrap(),
7949 &[(
7950 1..2,
7951 " println!(\"hello from HEAD\");\n",
7952 " println!(\"hello from the working copy\");\n",
7953 DiffHunkStatus {
7954 kind: DiffHunkStatusKind::Modified,
7955 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7956 },
7957 )],
7958 );
7959 });
7960}
7961
7962#[gpui::test]
7963async fn test_repository_and_path_for_project_path(
7964 background_executor: BackgroundExecutor,
7965 cx: &mut gpui::TestAppContext,
7966) {
7967 init_test(cx);
7968 let fs = FakeFs::new(background_executor);
7969 fs.insert_tree(
7970 path!("/root"),
7971 json!({
7972 "c.txt": "",
7973 "dir1": {
7974 ".git": {},
7975 "deps": {
7976 "dep1": {
7977 ".git": {},
7978 "src": {
7979 "a.txt": ""
7980 }
7981 }
7982 },
7983 "src": {
7984 "b.txt": ""
7985 }
7986 },
7987 }),
7988 )
7989 .await;
7990
7991 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7992 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7993 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7994 project
7995 .update(cx, |project, cx| project.git_scans_complete(cx))
7996 .await;
7997 cx.run_until_parked();
7998
7999 project.read_with(cx, |project, cx| {
8000 let git_store = project.git_store().read(cx);
8001 let pairs = [
8002 ("c.txt", None),
8003 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8004 (
8005 "dir1/deps/dep1/src/a.txt",
8006 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8007 ),
8008 ];
8009 let expected = pairs
8010 .iter()
8011 .map(|(path, result)| {
8012 (
8013 path,
8014 result.map(|(repo, repo_path)| {
8015 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8016 }),
8017 )
8018 })
8019 .collect::<Vec<_>>();
8020 let actual = pairs
8021 .iter()
8022 .map(|(path, _)| {
8023 let project_path = (tree_id, rel_path(path)).into();
8024 let result = maybe!({
8025 let (repo, repo_path) =
8026 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8027 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8028 });
8029 (path, result)
8030 })
8031 .collect::<Vec<_>>();
8032 pretty_assertions::assert_eq!(expected, actual);
8033 });
8034
8035 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8036 .await
8037 .unwrap();
8038 cx.run_until_parked();
8039
8040 project.read_with(cx, |project, cx| {
8041 let git_store = project.git_store().read(cx);
8042 assert_eq!(
8043 git_store.repository_and_path_for_project_path(
8044 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8045 cx
8046 ),
8047 None
8048 );
8049 });
8050}
8051
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    // Verifies that a `.git` directory sitting at the user's home directory is NOT
    // treated as a containing repository when a subdirectory of home is opened as the
    // project, but IS honored when home itself is opened as the worktree root.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: open `~/project` — the repo at `~/.git` must be ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        // No repository should claim files under `~/project` in this configuration.
        assert!(containing.is_none());
    });

    // Case 2: open `~` itself — now the home repo applies to files inside it.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        // The repository's work directory should be the home directory itself.
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8109
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check against a real on-disk git repository: modified, untracked,
    // deleted, and unchanged files must be reported with the right cached statuses,
    // and the cache must stay current as the working copy and index change.
    init_test(cx);
    // Real FS watching requires parking the executor on real I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged, so it should not appear at all.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; its status should now be picked up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the outstanding changes, clearing a/c, and drop d from the index.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8239
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Checks two status post-processing rules:
    // 1. a file deleted from the index but present on disk gets a combined
    //    "deleted in index / added in worktree" (DA) status, and
    // 2. nested repositories are excluded from the outer repo's status list.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are repositories; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8302
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Opens a worktree rooted deep inside a repository (not at the repo root) and
    // verifies that statuses for files under the worktree are still resolved against
    // the enclosing repository, and that status updates propagate.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository's work directory.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the repo root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's status should clear the cached statuses too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8382
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): `#[cfg(any())]` compiles to "never", so this test is currently disabled
// entirely; it is kept for reference until the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Verifies that a conflicted cherry-pick surfaces the conflicted path in
    // `repository.merge_conflicts`, and that completing the cherry-pick clears it.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on another branch that conflicts with main's change to a.txt.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    // Cherry-picking the other branch's commit should conflict.
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8465
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that editing `.gitignore` re-evaluates which entries are ignored, and
    // that a newly non-ignored file can then acquire a git status (here: Added).
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore states have flipped: a.xml is now ignored, b.txt is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8533
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a repository's work directory on disk updates
    // `work_directory_abs_path` while preserving the cached file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is tracked and then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the open repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Same repository entity, new work directory path, unchanged statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8614
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // Broad status-tracking test against a real repository: exercises untracked files,
    // worktree modifications, commits, resets, stashes, gitignore changes, and renames
    // of directories containing untracked files.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and expand the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a nested, newly-created directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked file's status must follow it.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8842
8843#[gpui::test]
8844#[cfg_attr(target_os = "windows", ignore)]
8845async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8846 init_test(cx);
8847 cx.executor().allow_parking();
8848
8849 const IGNORE_RULE: &str = "**/target";
8850
8851 let root = TempTree::new(json!({
8852 "project": {
8853 "src": {
8854 "main.rs": "fn main() {}"
8855 },
8856 "target": {
8857 "debug": {
8858 "important_text.txt": "important text",
8859 },
8860 },
8861 ".gitignore": IGNORE_RULE
8862 },
8863
8864 }));
8865 let root_path = root.path();
8866
8867 // Set up git repository before creating the worktree.
8868 let work_dir = root.path().join("project");
8869 let repo = git_init(work_dir.as_path());
8870 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8871 git_add("src/main.rs", &repo);
8872 git_add(".gitignore", &repo);
8873 git_commit("Initial commit", &repo);
8874
8875 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8876 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8877 let project_events = Arc::new(Mutex::new(Vec::new()));
8878 project.update(cx, |project, cx| {
8879 let repo_events = repository_updates.clone();
8880 cx.subscribe(project.git_store(), move |_, _, e, _| {
8881 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8882 repo_events.lock().push(e.clone());
8883 }
8884 })
8885 .detach();
8886 let project_events = project_events.clone();
8887 cx.subscribe_self(move |_, e, _| {
8888 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8889 project_events.lock().extend(
8890 updates
8891 .iter()
8892 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8893 .filter(|(path, _)| path != "fs-event-sentinel"),
8894 );
8895 }
8896 })
8897 .detach();
8898 });
8899
8900 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8901 tree.flush_fs_events(cx).await;
8902 tree.update(cx, |tree, cx| {
8903 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8904 })
8905 .await
8906 .unwrap();
8907 tree.update(cx, |tree, _| {
8908 assert_eq!(
8909 tree.entries(true, 0)
8910 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8911 .collect::<Vec<_>>(),
8912 vec![
8913 (rel_path(""), false),
8914 (rel_path("project/"), false),
8915 (rel_path("project/.gitignore"), false),
8916 (rel_path("project/src"), false),
8917 (rel_path("project/src/main.rs"), false),
8918 (rel_path("project/target"), true),
8919 (rel_path("project/target/debug"), true),
8920 (rel_path("project/target/debug/important_text.txt"), true),
8921 ]
8922 );
8923 });
8924
8925 assert_eq!(
8926 repository_updates.lock().drain(..).collect::<Vec<_>>(),
8927 vec![
8928 RepositoryEvent::Updated {
8929 full_scan: true,
8930 new_instance: false,
8931 },
8932 RepositoryEvent::MergeHeadsChanged,
8933 ],
8934 "Initial worktree scan should produce a repo update event"
8935 );
8936 assert_eq!(
8937 project_events.lock().drain(..).collect::<Vec<_>>(),
8938 vec![
8939 ("project/target".to_string(), PathChange::Loaded),
8940 ("project/target/debug".to_string(), PathChange::Loaded),
8941 (
8942 "project/target/debug/important_text.txt".to_string(),
8943 PathChange::Loaded
8944 ),
8945 ],
8946 "Initial project changes should show that all not-ignored and all opened files are loaded"
8947 );
8948
8949 let deps_dir = work_dir.join("target").join("debug").join("deps");
8950 std::fs::create_dir_all(&deps_dir).unwrap();
8951 tree.flush_fs_events(cx).await;
8952 project
8953 .update(cx, |project, cx| project.git_scans_complete(cx))
8954 .await;
8955 cx.executor().run_until_parked();
8956 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
8957 tree.flush_fs_events(cx).await;
8958 project
8959 .update(cx, |project, cx| project.git_scans_complete(cx))
8960 .await;
8961 cx.executor().run_until_parked();
8962 std::fs::remove_dir_all(&deps_dir).unwrap();
8963 tree.flush_fs_events(cx).await;
8964 project
8965 .update(cx, |project, cx| project.git_scans_complete(cx))
8966 .await;
8967 cx.executor().run_until_parked();
8968
8969 tree.update(cx, |tree, _| {
8970 assert_eq!(
8971 tree.entries(true, 0)
8972 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8973 .collect::<Vec<_>>(),
8974 vec![
8975 (rel_path(""), false),
8976 (rel_path("project/"), false),
8977 (rel_path("project/.gitignore"), false),
8978 (rel_path("project/src"), false),
8979 (rel_path("project/src/main.rs"), false),
8980 (rel_path("project/target"), true),
8981 (rel_path("project/target/debug"), true),
8982 (rel_path("project/target/debug/important_text.txt"), true),
8983 ],
8984 "No stray temp files should be left after the flycheck changes"
8985 );
8986 });
8987
8988 assert_eq!(
8989 repository_updates.lock().as_slice(),
8990 Vec::new(),
8991 "No further repo events should happen, as only ignored dirs' contents was changed",
8992 );
8993 assert_eq!(
8994 project_events.lock().as_slice(),
8995 vec![
8996 ("project/target/debug/deps".to_string(), PathChange::Added),
8997 ("project/target/debug/deps".to_string(), PathChange::Removed),
8998 ],
8999 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9000 No updates for more nested directories should happen as those are ignored",
9001 );
9002}
9003
9004#[gpui::test]
9005async fn test_odd_events_for_ignored_dirs(
9006 executor: BackgroundExecutor,
9007 cx: &mut gpui::TestAppContext,
9008) {
9009 init_test(cx);
9010 let fs = FakeFs::new(executor);
9011 fs.insert_tree(
9012 path!("/root"),
9013 json!({
9014 ".git": {},
9015 ".gitignore": "**/target/",
9016 "src": {
9017 "main.rs": "fn main() {}",
9018 },
9019 "target": {
9020 "debug": {
9021 "foo.txt": "foo",
9022 "deps": {}
9023 }
9024 }
9025 }),
9026 )
9027 .await;
9028 fs.set_head_and_index_for_repo(
9029 path!("/root/.git").as_ref(),
9030 &[
9031 (".gitignore", "**/target/".into()),
9032 ("src/main.rs", "fn main() {}".into()),
9033 ],
9034 );
9035
9036 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9037 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9038 let project_events = Arc::new(Mutex::new(Vec::new()));
9039 project.update(cx, |project, cx| {
9040 let repository_updates = repository_updates.clone();
9041 cx.subscribe(project.git_store(), move |_, _, e, _| {
9042 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9043 repository_updates.lock().push(e.clone());
9044 }
9045 })
9046 .detach();
9047 let project_events = project_events.clone();
9048 cx.subscribe_self(move |_, e, _| {
9049 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9050 project_events.lock().extend(
9051 updates
9052 .iter()
9053 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9054 .filter(|(path, _)| path != "fs-event-sentinel"),
9055 );
9056 }
9057 })
9058 .detach();
9059 });
9060
9061 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9062 tree.update(cx, |tree, cx| {
9063 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9064 })
9065 .await
9066 .unwrap();
9067 tree.flush_fs_events(cx).await;
9068 project
9069 .update(cx, |project, cx| project.git_scans_complete(cx))
9070 .await;
9071 cx.run_until_parked();
9072 tree.update(cx, |tree, _| {
9073 assert_eq!(
9074 tree.entries(true, 0)
9075 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9076 .collect::<Vec<_>>(),
9077 vec![
9078 (rel_path(""), false),
9079 (rel_path(".gitignore"), false),
9080 (rel_path("src"), false),
9081 (rel_path("src/main.rs"), false),
9082 (rel_path("target"), true),
9083 (rel_path("target/debug"), true),
9084 (rel_path("target/debug/deps"), true),
9085 (rel_path("target/debug/foo.txt"), true),
9086 ]
9087 );
9088 });
9089
9090 assert_eq!(
9091 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9092 vec![
9093 RepositoryEvent::Updated {
9094 full_scan: true,
9095 new_instance: false,
9096 },
9097 RepositoryEvent::MergeHeadsChanged,
9098 ],
9099 "Initial worktree scan should produce a repo update event"
9100 );
9101 assert_eq!(
9102 project_events.lock().drain(..).collect::<Vec<_>>(),
9103 vec![
9104 ("target".to_string(), PathChange::Loaded),
9105 ("target/debug".to_string(), PathChange::Loaded),
9106 ("target/debug/deps".to_string(), PathChange::Loaded),
9107 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9108 ],
9109 "All non-ignored entries and all opened firs should be getting a project event",
9110 );
9111
9112 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9113 // This may happen multiple times during a single flycheck, but once is enough for testing.
9114 fs.emit_fs_event("/root/target/debug/deps", None);
9115 tree.flush_fs_events(cx).await;
9116 project
9117 .update(cx, |project, cx| project.git_scans_complete(cx))
9118 .await;
9119 cx.executor().run_until_parked();
9120
9121 assert_eq!(
9122 repository_updates.lock().as_slice(),
9123 Vec::new(),
9124 "No further repo events should happen, as only ignored dirs received FS events",
9125 );
9126 assert_eq!(
9127 project_events.lock().as_slice(),
9128 Vec::new(),
9129 "No further project events should happen, as only ignored dirs received FS events",
9130 );
9131}
9132
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that creating an invisible (non-visible) worktree does not cause
    // additional repositories to be reported: only the repository containing the
    // visible worktree root (`dep1`) should appear, both before and after.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repositories: `dir1` has its own `.git`, and so does `dir1/dep1`.
    // Only `dep1` is opened as a visible worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Wait for the initial git scan so repository state is settled before asserting.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // Only the repo rooted at the visible worktree is reported, not the outer `dir1` repo.
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible (visible: false) worktree for a single file inside the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // The invisible worktree must not surface the outer `dir1` repository.
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9194
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies git status and ignore state for entries in tracked directories,
    // ancestor-ignored files, and repo-ignored directories — both after the
    // initial scan and after new files are created at runtime.
    init_test(cx);
    // Clear file-scan exclusions so ignored entries still show up in the worktree.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer `.gitignore` lives *above* the repository root, so it ignores
    // files by name anywhere below it ("ancestor-ignored" files).
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // HEAD and index agree for the two tracked files, so they start Unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force-load `ignored-dir`
    // so its entries exist and can be asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: unmodified tracked file, and no status for ignored files.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it (index only, not HEAD) — it should show as Added.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    // These two new files are ignored (by the ancestor gitignore and the repo
    // gitignore respectively), so they should get no git status.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9335
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file with a `gitdir:` pointer
    // into `<main>/.git/worktrees/...`) and submodules (`gitdir:` pointer into
    // `<main>/.git/modules/...`) are each discovered as separate repositories,
    // and that status refreshes propagate to them when their git state changes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Points back at the main repository's git dir.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // Linked worktree: `.git` is a file, not a directory.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    // HEAD/index say "b" while the file on disk says "B" => worktree-modified.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed all pending updates.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9491
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Verifies that two worktrees rooted in different subdirectories of the
    // same git repository produce a single deduplicated repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both worktrees resolve to the same repo; it must be reported only once.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9538
9539async fn search(
9540 project: &Entity<Project>,
9541 query: SearchQuery,
9542 cx: &mut gpui::TestAppContext,
9543) -> Result<HashMap<String, Vec<Range<usize>>>> {
9544 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9545 let mut results = HashMap::default();
9546 while let Ok(search_result) = search_rx.recv().await {
9547 match search_result {
9548 SearchResult::Buffer { buffer, ranges } => {
9549 results.entry(buffer).or_insert(ranges);
9550 }
9551 SearchResult::LimitReached => {}
9552 }
9553 }
9554 Ok(results
9555 .into_iter()
9556 .map(|(buffer, ranges)| {
9557 buffer.update(cx, |buffer, cx| {
9558 let path = buffer
9559 .file()
9560 .unwrap()
9561 .full_path(cx)
9562 .to_string_lossy()
9563 .to_string();
9564 let ranges = ranges
9565 .into_iter()
9566 .map(|range| range.to_offset(buffer))
9567 .collect::<Vec<_>>();
9568 (path, ranges)
9569 })
9570 })
9571 .collect())
9572}
9573
9574pub fn init_test(cx: &mut gpui::TestAppContext) {
9575 zlog::init_test();
9576
9577 cx.update(|cx| {
9578 let settings_store = SettingsStore::test(cx);
9579 cx.set_global(settings_store);
9580 release_channel::init(SemanticVersion::default(), cx);
9581 language::init(cx);
9582 Project::init_settings(cx);
9583 });
9584}
9585
9586fn json_lang() -> Arc<Language> {
9587 Arc::new(Language::new(
9588 LanguageConfig {
9589 name: "JSON".into(),
9590 matcher: LanguageMatcher {
9591 path_suffixes: vec!["json".to_string()],
9592 ..Default::default()
9593 },
9594 ..Default::default()
9595 },
9596 None,
9597 ))
9598}
9599
9600fn js_lang() -> Arc<Language> {
9601 Arc::new(Language::new(
9602 LanguageConfig {
9603 name: "JavaScript".into(),
9604 matcher: LanguageMatcher {
9605 path_suffixes: vec!["js".to_string()],
9606 ..Default::default()
9607 },
9608 ..Default::default()
9609 },
9610 None,
9611 ))
9612}
9613
9614fn rust_lang() -> Arc<Language> {
9615 Arc::new(Language::new(
9616 LanguageConfig {
9617 name: "Rust".into(),
9618 matcher: LanguageMatcher {
9619 path_suffixes: vec!["rs".to_string()],
9620 ..Default::default()
9621 },
9622 ..Default::default()
9623 },
9624 Some(tree_sitter_rust::LANGUAGE.into()),
9625 ))
9626}
9627
/// A Python language (no grammar) with a fake toolchain lister that reports a
/// virtual environment for every `.venv` directory found on the fake
/// filesystem along the ancestors of the queried path.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister: probes the FakeFs instead of a real interpreter.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // Report a toolchain for each `.venv` directory that exists in any
            // ancestor of `subroot_relative_path` (including the path itself).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in tests; always fails.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchains.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9695
9696fn typescript_lang() -> Arc<Language> {
9697 Arc::new(Language::new(
9698 LanguageConfig {
9699 name: "TypeScript".into(),
9700 matcher: LanguageMatcher {
9701 path_suffixes: vec!["ts".to_string()],
9702 ..Default::default()
9703 },
9704 ..Default::default()
9705 },
9706 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9707 ))
9708}
9709
9710fn tsx_lang() -> Arc<Language> {
9711 Arc::new(Language::new(
9712 LanguageConfig {
9713 name: "tsx".into(),
9714 matcher: LanguageMatcher {
9715 path_suffixes: vec!["tsx".to_string()],
9716 ..Default::default()
9717 },
9718 ..Default::default()
9719 },
9720 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9721 ))
9722}
9723
9724fn get_all_tasks(
9725 project: &Entity<Project>,
9726 task_contexts: Arc<TaskContexts>,
9727 cx: &mut App,
9728) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9729 let new_tasks = project.update(cx, |project, cx| {
9730 project.task_store.update(cx, |task_store, cx| {
9731 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9732 this.used_and_current_resolved_tasks(task_contexts, cx)
9733 })
9734 })
9735 });
9736
9737 cx.background_spawn(async move {
9738 let (mut old, new) = new_tasks.await;
9739 old.extend(new);
9740 old
9741 })
9742}
9743
9744#[track_caller]
9745fn assert_entry_git_state(
9746 tree: &Worktree,
9747 repository: &Repository,
9748 path: &str,
9749 index_status: Option<StatusCode>,
9750 is_ignored: bool,
9751) {
9752 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9753 let entry = tree
9754 .entry_for_path(&rel_path(path))
9755 .unwrap_or_else(|| panic!("entry {path} not found"));
9756 let status = repository
9757 .status_for_path(&repo_path(path))
9758 .map(|entry| entry.status);
9759 let expected = index_status.map(|index_status| {
9760 TrackedStatus {
9761 index_status,
9762 worktree_status: StatusCode::Unmodified,
9763 }
9764 .into()
9765 });
9766 assert_eq!(
9767 status, expected,
9768 "expected {path} to have git status: {expected:?}"
9769 );
9770 assert_eq!(
9771 entry.is_ignored, is_ignored,
9772 "expected {path} to have is_ignored: {is_ignored}"
9773 );
9774}
9775
9776#[track_caller]
9777fn git_init(path: &Path) -> git2::Repository {
9778 let mut init_opts = RepositoryInitOptions::new();
9779 init_opts.initial_head("main");
9780 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9781}
9782
9783#[track_caller]
9784fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9785 let path = path.as_ref();
9786 let mut index = repo.index().expect("Failed to get index");
9787 index.add_path(path).expect("Failed to add file");
9788 index.write().expect("Failed to write index");
9789}
9790
9791#[track_caller]
9792fn git_remove_index(path: &Path, repo: &git2::Repository) {
9793 let mut index = repo.index().expect("Failed to get index");
9794 index.remove_path(path).expect("Failed to add file");
9795 index.write().expect("Failed to write index");
9796}
9797
9798#[track_caller]
9799fn git_commit(msg: &'static str, repo: &git2::Repository) {
9800 use git2::Signature;
9801
9802 let signature = Signature::now("test", "test@zed.dev").unwrap();
9803 let oid = repo.index().unwrap().write_tree().unwrap();
9804 let tree = repo.find_tree(oid).unwrap();
9805 if let Ok(head) = repo.head() {
9806 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9807
9808 let parent_commit = parent_obj.as_commit().unwrap();
9809
9810 repo.commit(
9811 Some("HEAD"),
9812 &signature,
9813 &signature,
9814 msg,
9815 &tree,
9816 &[parent_commit],
9817 )
9818 .expect("Failed to commit with parent");
9819 } else {
9820 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9821 .expect("Failed to commit");
9822 }
9823}
9824
// Cherry-picks `commit` onto the current HEAD with default options.
// NOTE: `#[cfg(any())]` disables compilation — kept for future use.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9830
9831#[track_caller]
9832fn git_stash(repo: &mut git2::Repository) {
9833 use git2::Signature;
9834
9835 let signature = Signature::now("test", "test@zed.dev").unwrap();
9836 repo.stash_save(&signature, "N/A", None)
9837 .expect("Failed to stash");
9838}
9839
9840#[track_caller]
9841fn git_reset(offset: usize, repo: &git2::Repository) {
9842 let head = repo.head().expect("Couldn't get repo head");
9843 let object = head.peel(git2::ObjectType::Commit).unwrap();
9844 let commit = object.as_commit().unwrap();
9845 let new_head = commit
9846 .parents()
9847 .inspect(|parnet| {
9848 parnet.message();
9849 })
9850 .nth(offset)
9851 .expect("Not enough history");
9852 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9853 .expect("Could not reset");
9854}
9855
// Creates branch `name` pointing at the current HEAD commit (no force).
// NOTE: `#[cfg(any())]` disables compilation — kept for future use.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted message: this creates a branch, it does not commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9866
// Moves HEAD to the ref named `name` and checks out its tree into the
// working directory. NOTE: `#[cfg(any())]` disables compilation.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9873
// Returns a map of repo-relative path -> git status for every entry the
// repository reports. NOTE: `#[cfg(any())]` disables compilation.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9883
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    // Covers: existing files in either worktree, nested files, nonexistent
    // files under a worktree (still resolvable), and paths outside all
    // worktrees (unresolvable).
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees, one per project directory.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file: the relative path includes the subdirectory.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path under a worktree resolves even if the file does not exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}