1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use encodings::{Encoding, UTF_8};
16use fs::FakeFs;
17use futures::{StreamExt, future};
18use git::{
19 GitHostingProviderRegistry,
20 repository::{RepoPath, repo_path},
21 status::{StatusCode, TrackedStatus},
22};
23use git2::RepositoryInitOptions;
24use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
25use itertools::Itertools;
26use language::{
27 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
28 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
29 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
30 ToolchainLister,
31 language_settings::{LanguageSettingsContent, language_settings},
32 tree_sitter_rust, tree_sitter_typescript,
33};
34use lsp::{
35 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
36 Uri, WillRenameFiles, notification::DidRenameFiles,
37};
38use parking_lot::Mutex;
39use paths::{config_dir, global_gitignore_path, tasks_file};
40use postage::stream::Stream as _;
41use pretty_assertions::{assert_eq, assert_matches};
42use rand::{Rng as _, rngs::StdRng};
43use serde_json::json;
44#[cfg(not(windows))]
45use std::os;
46use std::{
47 env, mem,
48 num::NonZeroU32,
49 ops::Range,
50 str::FromStr,
51 sync::{Arc, OnceLock},
52 task::Poll,
53};
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
// Verifies `.editorconfig` support: editorconfig values override
// `.zed/settings.json` where present, a nested `.editorconfig` overrides the
// root one, `tab_width` is used when `indent_size` is absent, and
// `max_line_length = off` (or an unmatched glob) falls back to Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project can scan it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab size from .zed/settings.json
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
248#[gpui::test]
249async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
250 init_test(cx);
251 cx.update(|cx| {
252 GitHostingProviderRegistry::default_global(cx);
253 git_hosting_providers::init(cx);
254 });
255
256 let fs = FakeFs::new(cx.executor());
257 let str_path = path!("/dir");
258 let path = Path::new(str_path);
259
260 fs.insert_tree(
261 path!("/dir"),
262 json!({
263 ".zed": {
264 "settings.json": r#"{
265 "git_hosting_providers": [
266 {
267 "provider": "gitlab",
268 "base_url": "https://google.com",
269 "name": "foo"
270 }
271 ]
272 }"#
273 },
274 }),
275 )
276 .await;
277
278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
279 let (_worktree, _) =
280 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
281 cx.executor().run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292
293 fs.atomic_write(
294 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
295 "{}".into(),
296 )
297 .await
298 .unwrap();
299
300 cx.run_until_parked();
301
302 cx.update(|cx| {
303 let provider = GitHostingProviderRegistry::global(cx);
304 assert!(
305 !provider
306 .list_hosting_providers()
307 .into_iter()
308 .any(|provider| provider.name() == "foo")
309 );
310 });
311}
312
// Verifies per-directory `.zed` configuration: `settings.json` values apply to
// files under their directory, worktree `tasks.json` files contribute tasks
// (most recently scheduled first), and global file-based tasks are appended
// after worktree ones.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task context pointing at the (single) worktree, with no extra variables.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the root-level `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Each file picks up the `tab_size` from the nearest `.zed/settings.json`.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks resolve; neither has been scheduled yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as scheduled and add a global file-based task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task now sorts first; the global task is appended last
    // with its environment applied.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
514#[gpui::test]
515async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
516 init_test(cx);
517 TaskStore::init(None);
518
519 let fs = FakeFs::new(cx.executor());
520 fs.insert_tree(
521 path!("/dir"),
522 json!({
523 ".zed": {
524 "tasks.json": r#"[{
525 "label": "test worktree root",
526 "command": "echo $ZED_WORKTREE_ROOT"
527 }]"#,
528 },
529 "a": {
530 "a.rs": "fn a() {\n A\n}"
531 },
532 }),
533 )
534 .await;
535
536 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
537 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
538
539 cx.executor().run_until_parked();
540 let worktree_id = cx.update(|cx| {
541 project.update(cx, |project, cx| {
542 project.worktrees(cx).next().unwrap().read(cx).id()
543 })
544 });
545
546 let active_non_worktree_item_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: None,
553 other_worktree_contexts: Vec::new(),
554 lsp_task_sources: HashMap::default(),
555 latest_selection: None,
556 }),
557 cx,
558 )
559 })
560 .await;
561 assert!(
562 active_non_worktree_item_tasks.is_empty(),
563 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
564 );
565
566 let active_worktree_tasks = cx
567 .update(|cx| {
568 get_all_tasks(
569 &project,
570 Arc::new(TaskContexts {
571 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
572 active_worktree_context: Some((worktree_id, {
573 let mut worktree_context = TaskContext::default();
574 worktree_context
575 .task_variables
576 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
577 worktree_context
578 })),
579 other_worktree_contexts: Vec::new(),
580 lsp_task_sources: HashMap::default(),
581 latest_selection: None,
582 }),
583 cx,
584 )
585 })
586 .await;
587 assert_eq!(
588 active_worktree_tasks
589 .into_iter()
590 .map(|(source_kind, task)| {
591 let resolved = task.resolved;
592 (source_kind, resolved.command.unwrap())
593 })
594 .collect::<Vec<_>>(),
595 vec![(
596 TaskSourceKind::Worktree {
597 id: worktree_id,
598 directory_in_worktree: rel_path(".zed").into(),
599 id_base: "local worktree tasks from directory \".zed\"".into(),
600 },
601 "echo /dir".to_string(),
602 )]
603 );
604}
605
// Two Python subprojects under one worktree initially share a single language
// server instance; activating a different toolchain for one subproject causes
// a second server instance to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: roots a Python subproject at the nearest
    // ancestor directory containing `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for a manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's manifest, and finds one toolchain.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
// Verifies that file-system events are forwarded to a language server
// according to its `workspace/didChangeWatchedFiles` registrations:
// registering a watch inside a gitignored directory forces that directory to
// be loaded, out-of-worktree watch paths are honored, and only mutations
// matching the registered glob patterns are reported back to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Main worktree; `target` is gitignored, so its contents start unloaded.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // A directory outside the worktree, reachable only via go-to-definition.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    // Another out-of-worktree directory, later covered by a watch pattern.
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    // Baseline for counting the directory scans triggered by the watch
    // registrations performed below.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Sort by URI so assertions below are order-independent.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registering the watchers alone must not produce any change events.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // NOTE(review): four directory scans are expected while loading the newly
    // watched paths — confirm this count against FakeFs internals if it drifts.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Ignore watches installed by the settings system; only the watches that
    // serve the project and the LSP registrations matter here.
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    let encoding = Encoding::default();

    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &Rope::default(),
        Default::default(),
        encoding.clone(),
    )
    .await
    .unwrap();
    // NOTE(review): this writes to `/the-stdlib/LICENSE`, but the tree created
    // above lives at `/the/stdlib` — confirm the hyphenated path is intentional
    // (either way it matches no watcher, so no event is expected below).
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &Rope::default(),
        Default::default(),
        encoding.clone(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &Rope::default(),
        Default::default(),
        encoding,
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1519
1520#[gpui::test]
1521async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1522 init_test(cx);
1523
1524 let fs = FakeFs::new(cx.executor());
1525 fs.insert_tree(
1526 path!("/dir"),
1527 json!({
1528 "a.rs": "let a = 1;",
1529 "b.rs": "let b = 2;"
1530 }),
1531 )
1532 .await;
1533
1534 let project = Project::test(
1535 fs,
1536 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1537 cx,
1538 )
1539 .await;
1540 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1541
1542 let buffer_a = project
1543 .update(cx, |project, cx| {
1544 project.open_local_buffer(path!("/dir/a.rs"), cx)
1545 })
1546 .await
1547 .unwrap();
1548 let buffer_b = project
1549 .update(cx, |project, cx| {
1550 project.open_local_buffer(path!("/dir/b.rs"), cx)
1551 })
1552 .await
1553 .unwrap();
1554
1555 lsp_store.update(cx, |lsp_store, cx| {
1556 lsp_store
1557 .update_diagnostics(
1558 LanguageServerId(0),
1559 lsp::PublishDiagnosticsParams {
1560 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1561 version: None,
1562 diagnostics: vec![lsp::Diagnostic {
1563 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1564 severity: Some(lsp::DiagnosticSeverity::ERROR),
1565 message: "error 1".to_string(),
1566 ..Default::default()
1567 }],
1568 },
1569 None,
1570 DiagnosticSourceKind::Pushed,
1571 &[],
1572 cx,
1573 )
1574 .unwrap();
1575 lsp_store
1576 .update_diagnostics(
1577 LanguageServerId(0),
1578 lsp::PublishDiagnosticsParams {
1579 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1580 version: None,
1581 diagnostics: vec![lsp::Diagnostic {
1582 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1583 severity: Some(DiagnosticSeverity::WARNING),
1584 message: "error 2".to_string(),
1585 ..Default::default()
1586 }],
1587 },
1588 None,
1589 DiagnosticSourceKind::Pushed,
1590 &[],
1591 cx,
1592 )
1593 .unwrap();
1594 });
1595
1596 buffer_a.update(cx, |buffer, _| {
1597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1598 assert_eq!(
1599 chunks
1600 .iter()
1601 .map(|(s, d)| (s.as_str(), *d))
1602 .collect::<Vec<_>>(),
1603 &[
1604 ("let ", None),
1605 ("a", Some(DiagnosticSeverity::ERROR)),
1606 (" = 1;", None),
1607 ]
1608 );
1609 });
1610 buffer_b.update(cx, |buffer, _| {
1611 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1612 assert_eq!(
1613 chunks
1614 .iter()
1615 .map(|(s, d)| (s.as_str(), *d))
1616 .collect::<Vec<_>>(),
1617 &[
1618 ("let ", None),
1619 ("b", Some(DiagnosticSeverity::WARNING)),
1620 (" = 2;", None),
1621 ]
1622 );
1623 });
1624}
1625
1626#[gpui::test]
1627async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1628 init_test(cx);
1629
1630 let fs = FakeFs::new(cx.executor());
1631 fs.insert_tree(
1632 path!("/root"),
1633 json!({
1634 "dir": {
1635 ".git": {
1636 "HEAD": "ref: refs/heads/main",
1637 },
1638 ".gitignore": "b.rs",
1639 "a.rs": "let a = 1;",
1640 "b.rs": "let b = 2;",
1641 },
1642 "other.rs": "let b = c;"
1643 }),
1644 )
1645 .await;
1646
1647 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1648 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1649 let (worktree, _) = project
1650 .update(cx, |project, cx| {
1651 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1652 })
1653 .await
1654 .unwrap();
1655 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1656
1657 let (worktree, _) = project
1658 .update(cx, |project, cx| {
1659 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1660 })
1661 .await
1662 .unwrap();
1663 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1664
1665 let server_id = LanguageServerId(0);
1666 lsp_store.update(cx, |lsp_store, cx| {
1667 lsp_store
1668 .update_diagnostics(
1669 server_id,
1670 lsp::PublishDiagnosticsParams {
1671 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1672 version: None,
1673 diagnostics: vec![lsp::Diagnostic {
1674 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1675 severity: Some(lsp::DiagnosticSeverity::ERROR),
1676 message: "unused variable 'b'".to_string(),
1677 ..Default::default()
1678 }],
1679 },
1680 None,
1681 DiagnosticSourceKind::Pushed,
1682 &[],
1683 cx,
1684 )
1685 .unwrap();
1686 lsp_store
1687 .update_diagnostics(
1688 server_id,
1689 lsp::PublishDiagnosticsParams {
1690 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1691 version: None,
1692 diagnostics: vec![lsp::Diagnostic {
1693 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1694 severity: Some(lsp::DiagnosticSeverity::ERROR),
1695 message: "unknown variable 'c'".to_string(),
1696 ..Default::default()
1697 }],
1698 },
1699 None,
1700 DiagnosticSourceKind::Pushed,
1701 &[],
1702 cx,
1703 )
1704 .unwrap();
1705 });
1706
1707 let main_ignored_buffer = project
1708 .update(cx, |project, cx| {
1709 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1710 })
1711 .await
1712 .unwrap();
1713 main_ignored_buffer.update(cx, |buffer, _| {
1714 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1715 assert_eq!(
1716 chunks
1717 .iter()
1718 .map(|(s, d)| (s.as_str(), *d))
1719 .collect::<Vec<_>>(),
1720 &[
1721 ("let ", None),
1722 ("b", Some(DiagnosticSeverity::ERROR)),
1723 (" = 2;", None),
1724 ],
1725 "Gigitnored buffers should still get in-buffer diagnostics",
1726 );
1727 });
1728 let other_buffer = project
1729 .update(cx, |project, cx| {
1730 project.open_buffer((other_worktree_id, rel_path("")), cx)
1731 })
1732 .await
1733 .unwrap();
1734 other_buffer.update(cx, |buffer, _| {
1735 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1736 assert_eq!(
1737 chunks
1738 .iter()
1739 .map(|(s, d)| (s.as_str(), *d))
1740 .collect::<Vec<_>>(),
1741 &[
1742 ("let b = ", None),
1743 ("c", Some(DiagnosticSeverity::ERROR)),
1744 (";", None),
1745 ],
1746 "Buffers from hidden projects should still get in-buffer diagnostics"
1747 );
1748 });
1749
1750 project.update(cx, |project, cx| {
1751 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1752 assert_eq!(
1753 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1754 vec![(
1755 ProjectPath {
1756 worktree_id: main_worktree_id,
1757 path: rel_path("b.rs").into(),
1758 },
1759 server_id,
1760 DiagnosticSummary {
1761 error_count: 1,
1762 warning_count: 0,
1763 }
1764 )]
1765 );
1766 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1767 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1768 });
1769}
1770
// Exercises the disk-based-diagnostics lifecycle: progress reported under the
// adapter's `disk_based_diagnostics_progress_token` brackets the
// `DiskBasedDiagnosticsStarted`/`Finished` events, published diagnostics
// surface as `DiagnosticsUpdated`, and re-publishing an unchanged (empty)
// diagnostic set produces no duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // NOTE(review): the token carries a "/0" suffix; the started/finished
    // events below imply it is matched against the adapter's disk-based token
    // by prefix — confirm before relying on exact-match semantics.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is active produces an update
    // event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer once it is opened.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must be deduplicated: no event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1910
// Restarting a language server while its disk-based diagnostics are still in
// flight should abandon the old server's unfinished progress: the new server
// (with a new id) reports its lifecycle from scratch, and ending its progress
// leaves no server listed as still running disk-based diagnostics.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed; the replacement takes id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2014
2015#[gpui::test]
2016async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2017 init_test(cx);
2018
2019 let fs = FakeFs::new(cx.executor());
2020 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2021
2022 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2023
2024 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2025 language_registry.add(rust_lang());
2026 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2027
2028 let (buffer, _) = project
2029 .update(cx, |project, cx| {
2030 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2031 })
2032 .await
2033 .unwrap();
2034
2035 // Publish diagnostics
2036 let fake_server = fake_servers.next().await.unwrap();
2037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2038 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2039 version: None,
2040 diagnostics: vec![lsp::Diagnostic {
2041 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2042 severity: Some(lsp::DiagnosticSeverity::ERROR),
2043 message: "the message".to_string(),
2044 ..Default::default()
2045 }],
2046 });
2047
2048 cx.executor().run_until_parked();
2049 buffer.update(cx, |buffer, _| {
2050 assert_eq!(
2051 buffer
2052 .snapshot()
2053 .diagnostics_in_range::<_, usize>(0..1, false)
2054 .map(|entry| entry.diagnostic.message.clone())
2055 .collect::<Vec<_>>(),
2056 ["the message".to_string()]
2057 );
2058 });
2059 project.update(cx, |project, cx| {
2060 assert_eq!(
2061 project.diagnostic_summary(false, cx),
2062 DiagnosticSummary {
2063 error_count: 1,
2064 warning_count: 0,
2065 }
2066 );
2067 });
2068
2069 project.update(cx, |project, cx| {
2070 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2071 });
2072
2073 // The diagnostics are cleared.
2074 cx.executor().run_until_parked();
2075 buffer.update(cx, |buffer, _| {
2076 assert_eq!(
2077 buffer
2078 .snapshot()
2079 .diagnostics_in_range::<_, usize>(0..1, false)
2080 .map(|entry| entry.diagnostic.message.clone())
2081 .collect::<Vec<_>>(),
2082 Vec::<String>::new(),
2083 );
2084 });
2085 project.update(cx, |project, cx| {
2086 assert_eq!(
2087 project.diagnostic_summary(false, cx),
2088 DiagnosticSummary {
2089 error_count: 0,
2090 warning_count: 0,
2091 }
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2102
2103 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2104 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2105
2106 language_registry.add(rust_lang());
2107 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2108
2109 let (buffer, _handle) = project
2110 .update(cx, |project, cx| {
2111 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2112 })
2113 .await
2114 .unwrap();
2115
2116 // Before restarting the server, report diagnostics with an unknown buffer version.
2117 let fake_server = fake_servers.next().await.unwrap();
2118 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2119 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2120 version: Some(10000),
2121 diagnostics: Vec::new(),
2122 });
2123 cx.executor().run_until_parked();
2124 project.update(cx, |project, cx| {
2125 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2126 });
2127
2128 let mut fake_server = fake_servers.next().await.unwrap();
2129 let notification = fake_server
2130 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2131 .await
2132 .text_document;
2133 assert_eq!(notification.version, 0);
2134}
2135
// Cancelling language-server work for a buffer should send
// `window/workDoneProgress/cancel` only for progress the server marked as
// cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Non-cancellable work: no cancel notification may be sent for this token.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Cancellable work under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token gets a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2200
// Toggling the per-language `enable_language_server` setting should stop and
// start only the affected language's server: disabling Rust exits only the
// Rust server, and a subsequent enable starts a fresh instance that re-opens
// the Rust buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2318
2319#[gpui::test(iterations = 3)]
2320async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2321 init_test(cx);
2322
2323 let text = "
2324 fn a() { A }
2325 fn b() { BB }
2326 fn c() { CCC }
2327 "
2328 .unindent();
2329
2330 let fs = FakeFs::new(cx.executor());
2331 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2332
2333 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2334 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2335
2336 language_registry.add(rust_lang());
2337 let mut fake_servers = language_registry.register_fake_lsp(
2338 "Rust",
2339 FakeLspAdapter {
2340 disk_based_diagnostics_sources: vec!["disk".into()],
2341 ..Default::default()
2342 },
2343 );
2344
2345 let buffer = project
2346 .update(cx, |project, cx| {
2347 project.open_local_buffer(path!("/dir/a.rs"), cx)
2348 })
2349 .await
2350 .unwrap();
2351
2352 let _handle = project.update(cx, |project, cx| {
2353 project.register_buffer_with_language_servers(&buffer, cx)
2354 });
2355
2356 let mut fake_server = fake_servers.next().await.unwrap();
2357 let open_notification = fake_server
2358 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2359 .await;
2360
2361 // Edit the buffer, moving the content down
2362 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2363 let change_notification_1 = fake_server
2364 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2365 .await;
2366 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2367
2368 // Report some diagnostics for the initial version of the buffer
2369 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2370 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2371 version: Some(open_notification.text_document.version),
2372 diagnostics: vec![
2373 lsp::Diagnostic {
2374 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2375 severity: Some(DiagnosticSeverity::ERROR),
2376 message: "undefined variable 'A'".to_string(),
2377 source: Some("disk".to_string()),
2378 ..Default::default()
2379 },
2380 lsp::Diagnostic {
2381 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2382 severity: Some(DiagnosticSeverity::ERROR),
2383 message: "undefined variable 'BB'".to_string(),
2384 source: Some("disk".to_string()),
2385 ..Default::default()
2386 },
2387 lsp::Diagnostic {
2388 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2389 severity: Some(DiagnosticSeverity::ERROR),
2390 source: Some("disk".to_string()),
2391 message: "undefined variable 'CCC'".to_string(),
2392 ..Default::default()
2393 },
2394 ],
2395 });
2396
2397 // The diagnostics have moved down since they were created.
2398 cx.executor().run_until_parked();
2399 buffer.update(cx, |buffer, _| {
2400 assert_eq!(
2401 buffer
2402 .snapshot()
2403 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2404 .collect::<Vec<_>>(),
2405 &[
2406 DiagnosticEntry {
2407 range: Point::new(3, 9)..Point::new(3, 11),
2408 diagnostic: Diagnostic {
2409 source: Some("disk".into()),
2410 severity: DiagnosticSeverity::ERROR,
2411 message: "undefined variable 'BB'".to_string(),
2412 is_disk_based: true,
2413 group_id: 1,
2414 is_primary: true,
2415 source_kind: DiagnosticSourceKind::Pushed,
2416 ..Diagnostic::default()
2417 },
2418 },
2419 DiagnosticEntry {
2420 range: Point::new(4, 9)..Point::new(4, 12),
2421 diagnostic: Diagnostic {
2422 source: Some("disk".into()),
2423 severity: DiagnosticSeverity::ERROR,
2424 message: "undefined variable 'CCC'".to_string(),
2425 is_disk_based: true,
2426 group_id: 2,
2427 is_primary: true,
2428 source_kind: DiagnosticSourceKind::Pushed,
2429 ..Diagnostic::default()
2430 }
2431 }
2432 ]
2433 );
2434 assert_eq!(
2435 chunks_with_diagnostics(buffer, 0..buffer.len()),
2436 [
2437 ("\n\nfn a() { ".to_string(), None),
2438 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2439 (" }\nfn b() { ".to_string(), None),
2440 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2441 (" }\nfn c() { ".to_string(), None),
2442 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2443 (" }\n".to_string(), None),
2444 ]
2445 );
2446 assert_eq!(
2447 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2448 [
2449 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2450 (" }\nfn c() { ".to_string(), None),
2451 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2452 ]
2453 );
2454 });
2455
2456 // Ensure overlapping diagnostics are highlighted correctly.
2457 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2458 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2459 version: Some(open_notification.text_document.version),
2460 diagnostics: vec![
2461 lsp::Diagnostic {
2462 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2463 severity: Some(DiagnosticSeverity::ERROR),
2464 message: "undefined variable 'A'".to_string(),
2465 source: Some("disk".to_string()),
2466 ..Default::default()
2467 },
2468 lsp::Diagnostic {
2469 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2470 severity: Some(DiagnosticSeverity::WARNING),
2471 message: "unreachable statement".to_string(),
2472 source: Some("disk".to_string()),
2473 ..Default::default()
2474 },
2475 ],
2476 });
2477
2478 cx.executor().run_until_parked();
2479 buffer.update(cx, |buffer, _| {
2480 assert_eq!(
2481 buffer
2482 .snapshot()
2483 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2484 .collect::<Vec<_>>(),
2485 &[
2486 DiagnosticEntry {
2487 range: Point::new(2, 9)..Point::new(2, 12),
2488 diagnostic: Diagnostic {
2489 source: Some("disk".into()),
2490 severity: DiagnosticSeverity::WARNING,
2491 message: "unreachable statement".to_string(),
2492 is_disk_based: true,
2493 group_id: 4,
2494 is_primary: true,
2495 source_kind: DiagnosticSourceKind::Pushed,
2496 ..Diagnostic::default()
2497 }
2498 },
2499 DiagnosticEntry {
2500 range: Point::new(2, 9)..Point::new(2, 10),
2501 diagnostic: Diagnostic {
2502 source: Some("disk".into()),
2503 severity: DiagnosticSeverity::ERROR,
2504 message: "undefined variable 'A'".to_string(),
2505 is_disk_based: true,
2506 group_id: 3,
2507 is_primary: true,
2508 source_kind: DiagnosticSourceKind::Pushed,
2509 ..Diagnostic::default()
2510 },
2511 }
2512 ]
2513 );
2514 assert_eq!(
2515 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2516 [
2517 ("fn a() { ".to_string(), None),
2518 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2519 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2520 ("\n".to_string(), None),
2521 ]
2522 );
2523 assert_eq!(
2524 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2525 [
2526 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2527 ("\n".to_string(), None),
2528 ]
2529 );
2530 });
2531
2532 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2533 // changes since the last save.
2534 buffer.update(cx, |buffer, cx| {
2535 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2536 buffer.edit(
2537 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2538 None,
2539 cx,
2540 );
2541 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2542 });
2543 let change_notification_2 = fake_server
2544 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2545 .await;
2546 assert!(
2547 change_notification_2.text_document.version > change_notification_1.text_document.version
2548 );
2549
2550 // Handle out-of-order diagnostics
2551 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2552 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2553 version: Some(change_notification_2.text_document.version),
2554 diagnostics: vec![
2555 lsp::Diagnostic {
2556 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2557 severity: Some(DiagnosticSeverity::ERROR),
2558 message: "undefined variable 'BB'".to_string(),
2559 source: Some("disk".to_string()),
2560 ..Default::default()
2561 },
2562 lsp::Diagnostic {
2563 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2564 severity: Some(DiagnosticSeverity::WARNING),
2565 message: "undefined variable 'A'".to_string(),
2566 source: Some("disk".to_string()),
2567 ..Default::default()
2568 },
2569 ],
2570 });
2571
2572 cx.executor().run_until_parked();
2573 buffer.update(cx, |buffer, _| {
2574 assert_eq!(
2575 buffer
2576 .snapshot()
2577 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2578 .collect::<Vec<_>>(),
2579 &[
2580 DiagnosticEntry {
2581 range: Point::new(2, 21)..Point::new(2, 22),
2582 diagnostic: Diagnostic {
2583 source: Some("disk".into()),
2584 severity: DiagnosticSeverity::WARNING,
2585 message: "undefined variable 'A'".to_string(),
2586 is_disk_based: true,
2587 group_id: 6,
2588 is_primary: true,
2589 source_kind: DiagnosticSourceKind::Pushed,
2590 ..Diagnostic::default()
2591 }
2592 },
2593 DiagnosticEntry {
2594 range: Point::new(3, 9)..Point::new(3, 14),
2595 diagnostic: Diagnostic {
2596 source: Some("disk".into()),
2597 severity: DiagnosticSeverity::ERROR,
2598 message: "undefined variable 'BB'".to_string(),
2599 is_disk_based: true,
2600 group_id: 5,
2601 is_primary: true,
2602 source_kind: DiagnosticSourceKind::Pushed,
2603 ..Diagnostic::default()
2604 },
2605 }
2606 ]
2607 );
2608 });
2609}
2610
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, and at end-of-line it is
// extended backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store:
    // one mid-line (before ';') and one at the end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2686
// Verifies that diagnostics reported by two different language servers for the
// same path are counted separately in the project's diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same file, same range — but from two distinct server ids.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2747
// Verifies that LSP text edits computed against an older document version are
// correctly rebased onto the current buffer content: the user keeps editing
// after the server snapshots the document, and the resulting edits must still
// land where the server intended.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the version the server saw at open time; edits below are
    // expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits reference the *old* document version; they must be
    // interpolated through the user's concurrent edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        // User edits and server edits are both preserved in the result.
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2902
// Verifies that a large "rewrite the whole file" style diff from a language
// server (as rust-analyzer emits for merge-imports) is minimized into the
// small set of edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3013
// Verifies tolerance of spec-violating LSP servers: an insertion that comes
// *after* a replacement at the same position is still applied sensibly, with
// the insertion landing before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3069
// Verifies tolerance of malformed LSP edits: out-of-order edits, an inverted
// range (end before start), and a range pointing past the end of the document
// must all be normalized into a valid, minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends past the end of the document (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed input still collapses to two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3176
3177fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3178 buffer: &Buffer,
3179 range: Range<T>,
3180) -> Vec<(String, Option<DiagnosticSeverity>)> {
3181 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3182 for chunk in buffer.snapshot().chunks(range, true) {
3183 if chunks
3184 .last()
3185 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3186 {
3187 chunks.last_mut().unwrap().0.push_str(chunk.text);
3188 } else {
3189 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3190 }
3191 }
3192 chunks
3193}
3194
3195#[gpui::test(iterations = 10)]
3196async fn test_definition(cx: &mut gpui::TestAppContext) {
3197 init_test(cx);
3198
3199 let fs = FakeFs::new(cx.executor());
3200 fs.insert_tree(
3201 path!("/dir"),
3202 json!({
3203 "a.rs": "const fn a() { A }",
3204 "b.rs": "const y: i32 = crate::a()",
3205 }),
3206 )
3207 .await;
3208
3209 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
3210
3211 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3212 language_registry.add(rust_lang());
3213 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3214
3215 let (buffer, _handle) = project
3216 .update(cx, |project, cx| {
3217 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
3218 })
3219 .await
3220 .unwrap();
3221
3222 let fake_server = fake_servers.next().await.unwrap();
3223 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
3224 let params = params.text_document_position_params;
3225 assert_eq!(
3226 params.text_document.uri.to_file_path().unwrap(),
3227 Path::new(path!("/dir/b.rs")),
3228 );
3229 assert_eq!(params.position, lsp::Position::new(0, 22));
3230
3231 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
3232 lsp::Location::new(
3233 lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3234 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3235 ),
3236 )))
3237 });
3238 let mut definitions = project
3239 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
3240 .await
3241 .unwrap()
3242 .unwrap();
3243
3244 // Assert no new language server started
3245 cx.executor().run_until_parked();
3246 assert!(fake_servers.try_next().is_err());
3247
3248 assert_eq!(definitions.len(), 1);
3249 let definition = definitions.pop().unwrap();
3250 cx.update(|cx| {
3251 let target_buffer = definition.target.buffer.read(cx);
3252 assert_eq!(
3253 target_buffer
3254 .file()
3255 .unwrap()
3256 .as_local()
3257 .unwrap()
3258 .abs_path(cx),
3259 Path::new(path!("/dir/a.rs")),
3260 );
3261 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
3262 assert_eq!(
3263 list_worktrees(&project, cx),
3264 [
3265 (path!("/dir/a.rs").as_ref(), false),
3266 (path!("/dir/b.rs").as_ref(), true)
3267 ],
3268 );
3269
3270 drop(definition);
3271 });
3272 cx.update(|cx| {
3273 assert_eq!(
3274 list_worktrees(&project, cx),
3275 [(path!("/dir/b.rs").as_ref(), true)]
3276 );
3277 });
3278
3279 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
3280 project
3281 .read(cx)
3282 .worktrees(cx)
3283 .map(|worktree| {
3284 let worktree = worktree.read(cx);
3285 (
3286 worktree.as_local().unwrap().abs_path().as_ref(),
3287 worktree.is_visible(),
3288 )
3289 })
3290 .collect::<Vec<_>>()
3291 }
3292}
3293
// Verifies that when a completion item provides a `text_edit`, that edit's
// text and range take precedence over the item's `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server offers conflicting label / insert_text / text_edit values;
    // only the text_edit should be honored.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3376
// Completion items may omit their own `text_edit` and instead rely on the
// list-level `CompletionListItemDefaults::edit_range`. This test verifies
// that the default edit range is applied to each item, and that the
// replacement text falls back from `text_edit_text` to the item's `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file in an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake language server that advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request before installing the handler.
        // NOTE(review): this presumably relies on the fake server holding the
        // request until a handler is registered — confirm against the fake
        // LSP server implementation.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn" of `text`.
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the replacement text; the list-level
        // default edit_range supplies the span being replaced.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // Deliberately present to show it is NOT used when a
                        // default edit_range exists (assertion below expects
                        // the label instead).
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label is used as the new
        // text, not `insert_text`.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3513
// When a completion response carries neither a per-item `text_edit` nor a
// list-level default edit range, the replacement range must be computed
// client-side from the buffer contents, and the new text falls back from
// `insert_text` to the item's `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file in an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins, and the computed range covers the trailing "fqn".
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With nothing else available, the label is the new text, and the range
    // covers "cmp" inside the string literal (before the closing quote).
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3619
3620#[gpui::test]
3621async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3622 init_test(cx);
3623
3624 let fs = FakeFs::new(cx.executor());
3625 fs.insert_tree(
3626 path!("/dir"),
3627 json!({
3628 "a.ts": "",
3629 }),
3630 )
3631 .await;
3632
3633 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3634
3635 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3636 language_registry.add(typescript_lang());
3637 let mut fake_language_servers = language_registry.register_fake_lsp(
3638 "TypeScript",
3639 FakeLspAdapter {
3640 capabilities: lsp::ServerCapabilities {
3641 completion_provider: Some(lsp::CompletionOptions {
3642 trigger_characters: Some(vec![":".to_string()]),
3643 ..Default::default()
3644 }),
3645 ..Default::default()
3646 },
3647 ..Default::default()
3648 },
3649 );
3650
3651 let (buffer, _handle) = project
3652 .update(cx, |p, cx| {
3653 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3654 })
3655 .await
3656 .unwrap();
3657
3658 let fake_server = fake_language_servers.next().await.unwrap();
3659
3660 let text = "let a = b.fqn";
3661 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3662 let completions = project.update(cx, |project, cx| {
3663 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3664 });
3665
3666 fake_server
3667 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3668 Ok(Some(lsp::CompletionResponse::Array(vec![
3669 lsp::CompletionItem {
3670 label: "fullyQualifiedName?".into(),
3671 insert_text: Some("fully\rQualified\r\nName".into()),
3672 ..Default::default()
3673 },
3674 ])))
3675 })
3676 .next()
3677 .await;
3678 let completions = completions
3679 .await
3680 .unwrap()
3681 .into_iter()
3682 .flat_map(|response| response.completions)
3683 .collect::<Vec<_>>();
3684 assert_eq!(completions.len(), 1);
3685 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3686}
3687
// Applying a code action that resolves to a *command* (rather than an edit)
// must execute the command on the server, and the edits the server then
// pushes back via `workspace/applyEdit` must surface in the returned
// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single TypeScript file with contents "a".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises both lazy code-action resolution and a single
    // executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // Opaque `data` payload — echoed back during resolve below.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The server's edit was applied, and is undoable as one transaction.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3829
// Renaming a file into a not-yet-existing nested directory must create the
// whole directory hierarchy, preserve the file's contents, and also work
// when moving the file back into an already-existing directory.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Look up the worktree and the entry id of the file we'll rename.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The entry id changes after the first rename; fetch the new one.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3937
3938#[gpui::test(iterations = 10)]
3939async fn test_save_file(cx: &mut gpui::TestAppContext) {
3940 init_test(cx);
3941
3942 let fs = FakeFs::new(cx.executor());
3943 fs.insert_tree(
3944 path!("/dir"),
3945 json!({
3946 "file1": "the old contents",
3947 }),
3948 )
3949 .await;
3950
3951 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3952 let buffer = project
3953 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3954 .await
3955 .unwrap();
3956 buffer.update(cx, |buffer, cx| {
3957 assert_eq!(buffer.text(), "the old contents");
3958 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3959 });
3960
3961 project
3962 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3963 .await
3964 .unwrap();
3965
3966 let new_text = fs
3967 .load(Path::new(path!("/dir/file1")))
3968 .await
3969 .unwrap()
3970 .replace("\r\n", "\n");
3971 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3972}
3973
// Saving an untitled buffer to a path whose language has a registered
// server must spawn that server and open the document in it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Start with an empty directory — no Rust files exist yet, so no Rust
    // server should be running.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file (and thus no language) it
    // should not be attached to any language server.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a `.rs` file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4053
4054#[gpui::test(iterations = 30)]
4055async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4056 init_test(cx);
4057
4058 let fs = FakeFs::new(cx.executor());
4059 fs.insert_tree(
4060 path!("/dir"),
4061 json!({
4062 "file1": "the original contents",
4063 }),
4064 )
4065 .await;
4066
4067 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4068 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4069 let buffer = project
4070 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4071 .await
4072 .unwrap();
4073
4074 // Simulate buffer diffs being slow, so that they don't complete before
4075 // the next file change occurs.
4076 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4077
4078 let encoding = Encoding::default();
4079
4080 // Change the buffer's file on disk, and then wait for the file change
4081 // to be detected by the worktree, so that the buffer starts reloading.
4082 fs.save(
4083 path!("/dir/file1").as_ref(),
4084 &Rope::from_str("the first contents", cx.background_executor()),
4085 Default::default(),
4086 encoding.clone(),
4087 )
4088 .await
4089 .unwrap();
4090 worktree.next_event(cx).await;
4091
4092 // Change the buffer's file again. Depending on the random seed, the
4093 // previous file change may still be in progress.
4094 fs.save(
4095 path!("/dir/file1").as_ref(),
4096 &Rope::from_str("the second contents", cx.background_executor()),
4097 Default::default(),
4098 encoding,
4099 )
4100 .await
4101 .unwrap();
4102 worktree.next_event(cx).await;
4103
4104 cx.executor().run_until_parked();
4105 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4106 buffer.read_with(cx, |buffer, _| {
4107 assert_eq!(buffer.text(), on_disk_text);
4108 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4109 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4110 });
4111}
4112
4113#[gpui::test(iterations = 30)]
4114async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4115 init_test(cx);
4116
4117 let fs = FakeFs::new(cx.executor());
4118 fs.insert_tree(
4119 path!("/dir"),
4120 json!({
4121 "file1": "the original contents",
4122 }),
4123 )
4124 .await;
4125
4126 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4127 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4128 let buffer = project
4129 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4130 .await
4131 .unwrap();
4132
4133 // Simulate buffer diffs being slow, so that they don't complete before
4134 // the next file change occurs.
4135 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4136
4137 let encoding = Encoding::new(UTF_8);
4138
4139 // Change the buffer's file on disk, and then wait for the file change
4140 // to be detected by the worktree, so that the buffer starts reloading.
4141 fs.save(
4142 path!("/dir/file1").as_ref(),
4143 &Rope::from_str("the first contents", cx.background_executor()),
4144 Default::default(),
4145 encoding,
4146 )
4147 .await
4148 .unwrap();
4149 worktree.next_event(cx).await;
4150
4151 cx.executor()
4152 .spawn(cx.executor().simulate_random_delay())
4153 .await;
4154
4155 // Perform a noop edit, causing the buffer's version to increase.
4156 buffer.update(cx, |buffer, cx| {
4157 buffer.edit([(0..0, " ")], None, cx);
4158 buffer.undo(cx);
4159 });
4160
4161 cx.executor().run_until_parked();
4162 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4163 buffer.read_with(cx, |buffer, _| {
4164 let buffer_text = buffer.text();
4165 if buffer_text == on_disk_text {
4166 assert!(
4167 !buffer.is_dirty() && !buffer.has_conflict(),
4168 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4169 );
4170 }
4171 // If the file change occurred while the buffer was processing the first
4172 // change, the buffer will be in a conflicting state.
4173 else {
4174 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4175 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4176 }
4177 });
4178}
4179
4180#[gpui::test]
4181async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4182 init_test(cx);
4183
4184 let fs = FakeFs::new(cx.executor());
4185 fs.insert_tree(
4186 path!("/dir"),
4187 json!({
4188 "file1": "the old contents",
4189 }),
4190 )
4191 .await;
4192
4193 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4194 let buffer = project
4195 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4196 .await
4197 .unwrap();
4198 buffer.update(cx, |buffer, cx| {
4199 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4200 });
4201
4202 project
4203 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4204 .await
4205 .unwrap();
4206
4207 let new_text = fs
4208 .load(Path::new(path!("/dir/file1")))
4209 .await
4210 .unwrap()
4211 .replace("\r\n", "\n");
4212 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4213}
4214
4215#[gpui::test]
4216async fn test_save_as(cx: &mut gpui::TestAppContext) {
4217 init_test(cx);
4218
4219 let fs = FakeFs::new(cx.executor());
4220 fs.insert_tree("/dir", json!({})).await;
4221
4222 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4223
4224 let languages = project.update(cx, |project, _| project.languages().clone());
4225 languages.add(rust_lang());
4226
4227 let buffer = project.update(cx, |project, cx| {
4228 project.create_local_buffer("", None, false, cx)
4229 });
4230 buffer.update(cx, |buffer, cx| {
4231 buffer.edit([(0..0, "abc")], None, cx);
4232 assert!(buffer.is_dirty());
4233 assert!(!buffer.has_conflict());
4234 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4235 });
4236 project
4237 .update(cx, |project, cx| {
4238 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4239 let path = ProjectPath {
4240 worktree_id,
4241 path: rel_path("file1.rs").into(),
4242 };
4243 project.save_buffer_as(buffer.clone(), path, cx)
4244 })
4245 .await
4246 .unwrap();
4247 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4248
4249 cx.executor().run_until_parked();
4250 buffer.update(cx, |buffer, cx| {
4251 assert_eq!(
4252 buffer.file().unwrap().full_path(cx),
4253 Path::new("dir/file1.rs")
4254 );
4255 assert!(!buffer.is_dirty());
4256 assert!(!buffer.has_conflict());
4257 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4258 });
4259
4260 let opened_buffer = project
4261 .update(cx, |project, cx| {
4262 project.open_local_buffer("/dir/file1.rs", cx)
4263 })
4264 .await
4265 .unwrap();
4266 assert_eq!(opened_buffer, buffer);
4267}
4268
// "Save as" an already-file-backed buffer under a new name: the buffer must
// re-associate with the new path while the original file on disk remains
// untouched.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the tree is inserted *after* the project is created;
    // presumably the fake filesystem emits change events the worktree picks
    // up — confirm against FakeFs.
    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Change the trailing "a" to "b" so the saved copy differs from the
    // original file.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
4335
// Exercises a real filesystem: files are renamed/deleted behind the
// worktree's back, and the test verifies that (a) entry ids survive renames,
// (b) open buffers track their files' new paths or report deletion, and
// (c) a remote replica of the worktree converges after applying the
// streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS watching requires parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry id by relative path, panicking with
    // the path name if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree streams out, for later replay
    // into the remote replica.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...except the deleted file, which keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4503
4504#[gpui::test(iterations = 10)]
4505async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4506 init_test(cx);
4507
4508 let fs = FakeFs::new(cx.executor());
4509 fs.insert_tree(
4510 path!("/dir"),
4511 json!({
4512 "a": {
4513 "file1": "",
4514 }
4515 }),
4516 )
4517 .await;
4518
4519 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4520 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4521 let tree_id = tree.update(cx, |tree, _| tree.id());
4522
4523 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4524 project.update(cx, |project, cx| {
4525 let tree = project.worktrees(cx).next().unwrap();
4526 tree.read(cx)
4527 .entry_for_path(rel_path(path))
4528 .unwrap_or_else(|| panic!("no entry for path {}", path))
4529 .id
4530 })
4531 };
4532
4533 let dir_id = id_for_path("a", cx);
4534 let file_id = id_for_path("a/file1", cx);
4535 let buffer = project
4536 .update(cx, |p, cx| {
4537 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4538 })
4539 .await
4540 .unwrap();
4541 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4542
4543 project
4544 .update(cx, |project, cx| {
4545 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4546 })
4547 .unwrap()
4548 .await
4549 .into_included()
4550 .unwrap();
4551 cx.executor().run_until_parked();
4552
4553 assert_eq!(id_for_path("b", cx), dir_id);
4554 assert_eq!(id_for_path("b/file1", cx), file_id);
4555 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4556}
4557
4558#[gpui::test]
4559async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4560 init_test(cx);
4561
4562 let fs = FakeFs::new(cx.executor());
4563 fs.insert_tree(
4564 "/dir",
4565 json!({
4566 "a.txt": "a-contents",
4567 "b.txt": "b-contents",
4568 }),
4569 )
4570 .await;
4571
4572 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4573
4574 // Spawn multiple tasks to open paths, repeating some paths.
4575 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4576 (
4577 p.open_local_buffer("/dir/a.txt", cx),
4578 p.open_local_buffer("/dir/b.txt", cx),
4579 p.open_local_buffer("/dir/a.txt", cx),
4580 )
4581 });
4582
4583 let buffer_a_1 = buffer_a_1.await.unwrap();
4584 let buffer_a_2 = buffer_a_2.await.unwrap();
4585 let buffer_b = buffer_b.await.unwrap();
4586 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4587 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4588
4589 // There is only one buffer per path.
4590 let buffer_a_id = buffer_a_1.entity_id();
4591 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4592
4593 // Open the same path again while it is still open.
4594 drop(buffer_a_1);
4595 let buffer_a_3 = project
4596 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4597 .await
4598 .unwrap();
4599
4600 // There's still only one buffer per path.
4601 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4602}
4603
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer dirty-state machine: edits dirty a buffer, saving
    // cleans it, restoring the previously-saved content cleans it, and
    // deleting the backing file interacts with dirtiness in specific ways.
    // Also asserts the exact sequence of BufferEvents at each transition.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Records every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events accompany every edit and are irrelevant to
                // the dirty-state assertions below, so they are filtered out.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a successful save at the buffer's current version.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two edits flips the dirty bit, so a single
        // DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Deleting the backing file of a clean buffer only changes the file
    // handle; the buffer does not become dirty.
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file-handle change is reported, and the buffer stays dirty.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4785
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean
    // buffer reloads (applying the change as a diff so anchors survive),
    // while a dirty buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    // The marked offsets (ˇ) become anchors, letting us assert that they
    // track their logical positions through the on-disk edit.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");

    let encoding = Encoding::new(UTF_8);

    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str(new_contents.as_str(), cx.background_executor()),
        LineEnding::Unix,
        encoding,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits instead of being
        // invalidated by the reload.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let encoding = Encoding::new(UTF_8);

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
        LineEnding::Unix,
        encoding,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4875
4876#[gpui::test]
4877async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4878 init_test(cx);
4879
4880 let fs = FakeFs::new(cx.executor());
4881 fs.insert_tree(
4882 path!("/dir"),
4883 json!({
4884 "file1": "a\nb\nc\n",
4885 "file2": "one\r\ntwo\r\nthree\r\n",
4886 }),
4887 )
4888 .await;
4889
4890 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4891 let buffer1 = project
4892 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4893 .await
4894 .unwrap();
4895 let buffer2 = project
4896 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4897 .await
4898 .unwrap();
4899
4900 buffer1.update(cx, |buffer, _| {
4901 assert_eq!(buffer.text(), "a\nb\nc\n");
4902 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4903 });
4904 buffer2.update(cx, |buffer, _| {
4905 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4906 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4907 });
4908
4909 let encoding = Encoding::new(UTF_8);
4910
4911 // Change a file's line endings on disk from unix to windows. The buffer's
4912 // state updates correctly.
4913 fs.save(
4914 path!("/dir/file1").as_ref(),
4915 &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
4916 LineEnding::Windows,
4917 encoding,
4918 )
4919 .await
4920 .unwrap();
4921 cx.executor().run_until_parked();
4922 buffer1.update(cx, |buffer, _| {
4923 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4924 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4925 });
4926
4927 // Save a file with windows line endings. The file is written correctly.
4928 buffer2.update(cx, |buffer, cx| {
4929 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4930 });
4931 project
4932 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4933 .await
4934 .unwrap();
4935 assert_eq!(
4936 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4937 "one\r\ntwo\r\nthree\r\nfour\r\n",
4938 );
4939}
4940
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` entries link
    // primaries to their hints, then asserts that the buffer assigns group
    // ids and `is_primary` flags, orders entries by position within the
    // buffer, and returns each group's members via `diagnostic_group`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": a warning with a single related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": an error with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position; hints share their primary's
    // group id, and only the primary has `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" family: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" family: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5200
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // When a language server registers for file-operation notifications,
    // renaming a worktree entry must send `workspace/willRenameFiles`
    // before the rename (applying any returned workspace edit) and
    // `workspace/didRenameFiles` afterwards.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in .rs files and in all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a Rust buffer so the fake language server starts.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs through the project API.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRenameFiles handler actually runs, so we can assert
    // at the end that the request was made.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles request was sent and answered with our edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5336
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename end-to-end: `prepare_rename` resolves the
    // symbol's range via the server, and `perform_rename` applies the
    // server's multi-file workspace edit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol's
    // full range, as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server answers with edits spanning two files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction covers both edited buffers, with the rename
    // applied in each.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5476
5477#[gpui::test]
5478async fn test_search(cx: &mut gpui::TestAppContext) {
5479 init_test(cx);
5480
5481 let fs = FakeFs::new(cx.executor());
5482 fs.insert_tree(
5483 path!("/dir"),
5484 json!({
5485 "one.rs": "const ONE: usize = 1;",
5486 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5487 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5488 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5489 }),
5490 )
5491 .await;
5492 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5493 assert_eq!(
5494 search(
5495 &project,
5496 SearchQuery::text(
5497 "TWO",
5498 false,
5499 true,
5500 false,
5501 Default::default(),
5502 Default::default(),
5503 false,
5504 None
5505 )
5506 .unwrap(),
5507 cx
5508 )
5509 .await
5510 .unwrap(),
5511 HashMap::from_iter([
5512 (path!("dir/two.rs").to_string(), vec![6..9]),
5513 (path!("dir/three.rs").to_string(), vec![37..40])
5514 ])
5515 );
5516
5517 let buffer_4 = project
5518 .update(cx, |project, cx| {
5519 project.open_local_buffer(path!("/dir/four.rs"), cx)
5520 })
5521 .await
5522 .unwrap();
5523 buffer_4.update(cx, |buffer, cx| {
5524 let text = "two::TWO";
5525 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5526 });
5527
5528 assert_eq!(
5529 search(
5530 &project,
5531 SearchQuery::text(
5532 "TWO",
5533 false,
5534 true,
5535 false,
5536 Default::default(),
5537 Default::default(),
5538 false,
5539 None,
5540 )
5541 .unwrap(),
5542 cx
5543 )
5544 .await
5545 .unwrap(),
5546 HashMap::from_iter([
5547 (path!("dir/two.rs").to_string(), vec![6..9]),
5548 (path!("dir/three.rs").to_string(), vec![37..40]),
5549 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5550 ])
5551 );
5552}
5553
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the inclusion `PathMatcher` passed to `SearchQuery::text`:
    // non-matching patterns yield no results, matching patterns restrict
    // results to the matched files, and unmatched extra patterns are
    // harmless when combined with matching ones.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion pattern that matches nothing in the tree.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion pattern matching a subset of the files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // Mixing a matching pattern with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching patterns together, plus a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5677
5678#[gpui::test]
5679async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5680 init_test(cx);
5681
5682 let search_query = "file";
5683
5684 let fs = FakeFs::new(cx.executor());
5685 fs.insert_tree(
5686 path!("/dir"),
5687 json!({
5688 "one.rs": r#"// Rust file one"#,
5689 "one.ts": r#"// TypeScript file one"#,
5690 "two.rs": r#"// Rust file two"#,
5691 "two.ts": r#"// TypeScript file two"#,
5692 }),
5693 )
5694 .await;
5695 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5696
5697 assert_eq!(
5698 search(
5699 &project,
5700 SearchQuery::text(
5701 search_query,
5702 false,
5703 true,
5704 false,
5705 Default::default(),
5706 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5707 false,
5708 None,
5709 )
5710 .unwrap(),
5711 cx
5712 )
5713 .await
5714 .unwrap(),
5715 HashMap::from_iter([
5716 (path!("dir/one.rs").to_string(), vec![8..12]),
5717 (path!("dir/one.ts").to_string(), vec![14..18]),
5718 (path!("dir/two.rs").to_string(), vec![8..12]),
5719 (path!("dir/two.ts").to_string(), vec![14..18]),
5720 ]),
5721 "If no exclusions match, all files should be returned"
5722 );
5723
5724 assert_eq!(
5725 search(
5726 &project,
5727 SearchQuery::text(
5728 search_query,
5729 false,
5730 true,
5731 false,
5732 Default::default(),
5733 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5734 false,
5735 None,
5736 )
5737 .unwrap(),
5738 cx
5739 )
5740 .await
5741 .unwrap(),
5742 HashMap::from_iter([
5743 (path!("dir/one.ts").to_string(), vec![14..18]),
5744 (path!("dir/two.ts").to_string(), vec![14..18]),
5745 ]),
5746 "Rust exclusion search should give only TypeScript files"
5747 );
5748
5749 assert_eq!(
5750 search(
5751 &project,
5752 SearchQuery::text(
5753 search_query,
5754 false,
5755 true,
5756 false,
5757 Default::default(),
5758 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5759 .unwrap(),
5760 false,
5761 None,
5762 )
5763 .unwrap(),
5764 cx
5765 )
5766 .await
5767 .unwrap(),
5768 HashMap::from_iter([
5769 (path!("dir/one.rs").to_string(), vec![8..12]),
5770 (path!("dir/two.rs").to_string(), vec![8..12]),
5771 ]),
5772 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5773 );
5774
5775 assert!(
5776 search(
5777 &project,
5778 SearchQuery::text(
5779 search_query,
5780 false,
5781 true,
5782 false,
5783 Default::default(),
5784 PathMatcher::new(
5785 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5786 PathStyle::local(),
5787 )
5788 .unwrap(),
5789 false,
5790 None,
5791 )
5792 .unwrap(),
5793 cx
5794 )
5795 .await
5796 .unwrap()
5797 .is_empty(),
5798 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5799 );
5800}
5801
5802#[gpui::test]
5803async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5804 init_test(cx);
5805
5806 let search_query = "file";
5807
5808 let fs = FakeFs::new(cx.executor());
5809 fs.insert_tree(
5810 path!("/dir"),
5811 json!({
5812 "one.rs": r#"// Rust file one"#,
5813 "one.ts": r#"// TypeScript file one"#,
5814 "two.rs": r#"// Rust file two"#,
5815 "two.ts": r#"// TypeScript file two"#,
5816 }),
5817 )
5818 .await;
5819
5820 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5821 let path_style = PathStyle::local();
5822 let _buffer = project.update(cx, |project, cx| {
5823 project.create_local_buffer("file", None, false, cx)
5824 });
5825
5826 assert_eq!(
5827 search(
5828 &project,
5829 SearchQuery::text(
5830 search_query,
5831 false,
5832 true,
5833 false,
5834 Default::default(),
5835 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5836 false,
5837 None,
5838 )
5839 .unwrap(),
5840 cx
5841 )
5842 .await
5843 .unwrap(),
5844 HashMap::from_iter([
5845 (path!("dir/one.rs").to_string(), vec![8..12]),
5846 (path!("dir/one.ts").to_string(), vec![14..18]),
5847 (path!("dir/two.rs").to_string(), vec![8..12]),
5848 (path!("dir/two.ts").to_string(), vec![14..18]),
5849 ]),
5850 "If no exclusions match, all files should be returned"
5851 );
5852
5853 assert_eq!(
5854 search(
5855 &project,
5856 SearchQuery::text(
5857 search_query,
5858 false,
5859 true,
5860 false,
5861 Default::default(),
5862 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5863 false,
5864 None,
5865 )
5866 .unwrap(),
5867 cx
5868 )
5869 .await
5870 .unwrap(),
5871 HashMap::from_iter([
5872 (path!("dir/one.ts").to_string(), vec![14..18]),
5873 (path!("dir/two.ts").to_string(), vec![14..18]),
5874 ]),
5875 "Rust exclusion search should give only TypeScript files"
5876 );
5877
5878 assert_eq!(
5879 search(
5880 &project,
5881 SearchQuery::text(
5882 search_query,
5883 false,
5884 true,
5885 false,
5886 Default::default(),
5887 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5888 false,
5889 None,
5890 )
5891 .unwrap(),
5892 cx
5893 )
5894 .await
5895 .unwrap(),
5896 HashMap::from_iter([
5897 (path!("dir/one.rs").to_string(), vec![8..12]),
5898 (path!("dir/two.rs").to_string(), vec![8..12]),
5899 ]),
5900 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5901 );
5902
5903 assert!(
5904 search(
5905 &project,
5906 SearchQuery::text(
5907 search_query,
5908 false,
5909 true,
5910 false,
5911 Default::default(),
5912 PathMatcher::new(
5913 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5914 PathStyle::local(),
5915 )
5916 .unwrap(),
5917 false,
5918 None,
5919 )
5920 .unwrap(),
5921 cx
5922 )
5923 .await
5924 .unwrap()
5925 .is_empty(),
5926 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5927 );
5928}
5929
5930#[gpui::test]
5931async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5932 init_test(cx);
5933
5934 let search_query = "file";
5935
5936 let fs = FakeFs::new(cx.executor());
5937 fs.insert_tree(
5938 path!("/dir"),
5939 json!({
5940 "one.rs": r#"// Rust file one"#,
5941 "one.ts": r#"// TypeScript file one"#,
5942 "two.rs": r#"// Rust file two"#,
5943 "two.ts": r#"// TypeScript file two"#,
5944 }),
5945 )
5946 .await;
5947 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5948 assert!(
5949 search(
5950 &project,
5951 SearchQuery::text(
5952 search_query,
5953 false,
5954 true,
5955 false,
5956 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5957 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5958 false,
5959 None,
5960 )
5961 .unwrap(),
5962 cx
5963 )
5964 .await
5965 .unwrap()
5966 .is_empty(),
5967 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5968 );
5969
5970 assert!(
5971 search(
5972 &project,
5973 SearchQuery::text(
5974 search_query,
5975 false,
5976 true,
5977 false,
5978 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5979 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5980 false,
5981 None,
5982 )
5983 .unwrap(),
5984 cx
5985 )
5986 .await
5987 .unwrap()
5988 .is_empty(),
5989 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5990 );
5991
5992 assert!(
5993 search(
5994 &project,
5995 SearchQuery::text(
5996 search_query,
5997 false,
5998 true,
5999 false,
6000 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6001 .unwrap(),
6002 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6003 .unwrap(),
6004 false,
6005 None,
6006 )
6007 .unwrap(),
6008 cx
6009 )
6010 .await
6011 .unwrap()
6012 .is_empty(),
6013 "Non-matching inclusions and exclusions should not change that."
6014 );
6015
6016 assert_eq!(
6017 search(
6018 &project,
6019 SearchQuery::text(
6020 search_query,
6021 false,
6022 true,
6023 false,
6024 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6025 .unwrap(),
6026 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6027 .unwrap(),
6028 false,
6029 None,
6030 )
6031 .unwrap(),
6032 cx
6033 )
6034 .await
6035 .unwrap(),
6036 HashMap::from_iter([
6037 (path!("dir/one.ts").to_string(), vec![14..18]),
6038 (path!("dir/two.ts").to_string(), vec![14..18]),
6039 ]),
6040 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6041 );
6042}
6043
6044#[gpui::test]
6045async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6046 init_test(cx);
6047
6048 let fs = FakeFs::new(cx.executor());
6049 fs.insert_tree(
6050 path!("/worktree-a"),
6051 json!({
6052 "haystack.rs": r#"// NEEDLE"#,
6053 "haystack.ts": r#"// NEEDLE"#,
6054 }),
6055 )
6056 .await;
6057 fs.insert_tree(
6058 path!("/worktree-b"),
6059 json!({
6060 "haystack.rs": r#"// NEEDLE"#,
6061 "haystack.ts": r#"// NEEDLE"#,
6062 }),
6063 )
6064 .await;
6065
6066 let path_style = PathStyle::local();
6067 let project = Project::test(
6068 fs.clone(),
6069 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6070 cx,
6071 )
6072 .await;
6073
6074 assert_eq!(
6075 search(
6076 &project,
6077 SearchQuery::text(
6078 "NEEDLE",
6079 false,
6080 true,
6081 false,
6082 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6083 Default::default(),
6084 true,
6085 None,
6086 )
6087 .unwrap(),
6088 cx
6089 )
6090 .await
6091 .unwrap(),
6092 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6093 "should only return results from included worktree"
6094 );
6095 assert_eq!(
6096 search(
6097 &project,
6098 SearchQuery::text(
6099 "NEEDLE",
6100 false,
6101 true,
6102 false,
6103 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6104 Default::default(),
6105 true,
6106 None,
6107 )
6108 .unwrap(),
6109 cx
6110 )
6111 .await
6112 .unwrap(),
6113 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6114 "should only return results from included worktree"
6115 );
6116
6117 assert_eq!(
6118 search(
6119 &project,
6120 SearchQuery::text(
6121 "NEEDLE",
6122 false,
6123 true,
6124 false,
6125 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6126 Default::default(),
6127 false,
6128 None,
6129 )
6130 .unwrap(),
6131 cx
6132 )
6133 .await
6134 .unwrap(),
6135 HashMap::from_iter([
6136 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6137 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6138 ]),
6139 "should return results from both worktrees"
6140 );
6141}
6142
6143#[gpui::test]
6144async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6145 init_test(cx);
6146
6147 let fs = FakeFs::new(cx.background_executor.clone());
6148 fs.insert_tree(
6149 path!("/dir"),
6150 json!({
6151 ".git": {},
6152 ".gitignore": "**/target\n/node_modules\n",
6153 "target": {
6154 "index.txt": "index_key:index_value"
6155 },
6156 "node_modules": {
6157 "eslint": {
6158 "index.ts": "const eslint_key = 'eslint value'",
6159 "package.json": r#"{ "some_key": "some value" }"#,
6160 },
6161 "prettier": {
6162 "index.ts": "const prettier_key = 'prettier value'",
6163 "package.json": r#"{ "other_key": "other value" }"#,
6164 },
6165 },
6166 "package.json": r#"{ "main_key": "main value" }"#,
6167 }),
6168 )
6169 .await;
6170 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6171
6172 let query = "key";
6173 assert_eq!(
6174 search(
6175 &project,
6176 SearchQuery::text(
6177 query,
6178 false,
6179 false,
6180 false,
6181 Default::default(),
6182 Default::default(),
6183 false,
6184 None,
6185 )
6186 .unwrap(),
6187 cx
6188 )
6189 .await
6190 .unwrap(),
6191 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6192 "Only one non-ignored file should have the query"
6193 );
6194
6195 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6196 let path_style = PathStyle::local();
6197 assert_eq!(
6198 search(
6199 &project,
6200 SearchQuery::text(
6201 query,
6202 false,
6203 false,
6204 true,
6205 Default::default(),
6206 Default::default(),
6207 false,
6208 None,
6209 )
6210 .unwrap(),
6211 cx
6212 )
6213 .await
6214 .unwrap(),
6215 HashMap::from_iter([
6216 (path!("dir/package.json").to_string(), vec![8..11]),
6217 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6218 (
6219 path!("dir/node_modules/prettier/package.json").to_string(),
6220 vec![9..12]
6221 ),
6222 (
6223 path!("dir/node_modules/prettier/index.ts").to_string(),
6224 vec![15..18]
6225 ),
6226 (
6227 path!("dir/node_modules/eslint/index.ts").to_string(),
6228 vec![13..16]
6229 ),
6230 (
6231 path!("dir/node_modules/eslint/package.json").to_string(),
6232 vec![8..11]
6233 ),
6234 ]),
6235 "Unrestricted search with ignored directories should find every file with the query"
6236 );
6237
6238 let files_to_include =
6239 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6240 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6241 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6242 assert_eq!(
6243 search(
6244 &project,
6245 SearchQuery::text(
6246 query,
6247 false,
6248 false,
6249 true,
6250 files_to_include,
6251 files_to_exclude,
6252 false,
6253 None,
6254 )
6255 .unwrap(),
6256 cx
6257 )
6258 .await
6259 .unwrap(),
6260 HashMap::from_iter([(
6261 path!("dir/node_modules/prettier/package.json").to_string(),
6262 vec![9..12]
6263 )]),
6264 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6265 );
6266}
6267
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive Unicode query stays a plain-text query, as asserted
    // by the `assert_matches!` on the variant below.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Match ranges are byte offsets: "привет" is 6 Cyrillic chars = 12 bytes
    // (e.g. 3..15 and 17..29 below are 12-byte spans).
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive Unicode query is represented as a regex query, as
    // asserted by the `assert_matches!` on the variant below; it must match
    // both the upper- and lower-case spellings.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A query containing a regex metacharacter ('.') must still match
    // literally: only two.rs contains a literal "ПРИВЕТ." — if '.' acted as
    // a wildcard, "ПРИВЕТ?" in one.rs would have matched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6350
6351#[gpui::test]
6352async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6353 init_test(cx);
6354
6355 let fs = FakeFs::new(cx.executor());
6356 fs.insert_tree(
6357 "/one/two",
6358 json!({
6359 "three": {
6360 "a.txt": "",
6361 "four": {}
6362 },
6363 "c.rs": ""
6364 }),
6365 )
6366 .await;
6367
6368 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6369 project
6370 .update(cx, |project, cx| {
6371 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6372 project.create_entry((id, rel_path("b..")), true, cx)
6373 })
6374 .await
6375 .unwrap()
6376 .into_included()
6377 .unwrap();
6378
6379 assert_eq!(
6380 fs.paths(true),
6381 vec![
6382 PathBuf::from(path!("/")),
6383 PathBuf::from(path!("/one")),
6384 PathBuf::from(path!("/one/two")),
6385 PathBuf::from(path!("/one/two/c.rs")),
6386 PathBuf::from(path!("/one/two/three")),
6387 PathBuf::from(path!("/one/two/three/a.txt")),
6388 PathBuf::from(path!("/one/two/three/b..")),
6389 PathBuf::from(path!("/one/two/three/four")),
6390 ]
6391 );
6392}
6393
6394#[gpui::test]
6395async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6396 init_test(cx);
6397
6398 let fs = FakeFs::new(cx.executor());
6399 fs.insert_tree(
6400 path!("/dir"),
6401 json!({
6402 "a.tsx": "a",
6403 }),
6404 )
6405 .await;
6406
6407 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6408
6409 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6410 language_registry.add(tsx_lang());
6411 let language_server_names = [
6412 "TypeScriptServer",
6413 "TailwindServer",
6414 "ESLintServer",
6415 "NoHoverCapabilitiesServer",
6416 ];
6417 let mut language_servers = [
6418 language_registry.register_fake_lsp(
6419 "tsx",
6420 FakeLspAdapter {
6421 name: language_server_names[0],
6422 capabilities: lsp::ServerCapabilities {
6423 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6424 ..lsp::ServerCapabilities::default()
6425 },
6426 ..FakeLspAdapter::default()
6427 },
6428 ),
6429 language_registry.register_fake_lsp(
6430 "tsx",
6431 FakeLspAdapter {
6432 name: language_server_names[1],
6433 capabilities: lsp::ServerCapabilities {
6434 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6435 ..lsp::ServerCapabilities::default()
6436 },
6437 ..FakeLspAdapter::default()
6438 },
6439 ),
6440 language_registry.register_fake_lsp(
6441 "tsx",
6442 FakeLspAdapter {
6443 name: language_server_names[2],
6444 capabilities: lsp::ServerCapabilities {
6445 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6446 ..lsp::ServerCapabilities::default()
6447 },
6448 ..FakeLspAdapter::default()
6449 },
6450 ),
6451 language_registry.register_fake_lsp(
6452 "tsx",
6453 FakeLspAdapter {
6454 name: language_server_names[3],
6455 capabilities: lsp::ServerCapabilities {
6456 hover_provider: None,
6457 ..lsp::ServerCapabilities::default()
6458 },
6459 ..FakeLspAdapter::default()
6460 },
6461 ),
6462 ];
6463
6464 let (buffer, _handle) = project
6465 .update(cx, |p, cx| {
6466 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6467 })
6468 .await
6469 .unwrap();
6470 cx.executor().run_until_parked();
6471
6472 let mut servers_with_hover_requests = HashMap::default();
6473 for i in 0..language_server_names.len() {
6474 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6475 panic!(
6476 "Failed to get language server #{i} with name {}",
6477 &language_server_names[i]
6478 )
6479 });
6480 let new_server_name = new_server.server.name();
6481 assert!(
6482 !servers_with_hover_requests.contains_key(&new_server_name),
6483 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6484 );
6485 match new_server_name.as_ref() {
6486 "TailwindServer" | "TypeScriptServer" => {
6487 servers_with_hover_requests.insert(
6488 new_server_name.clone(),
6489 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6490 move |_, _| {
6491 let name = new_server_name.clone();
6492 async move {
6493 Ok(Some(lsp::Hover {
6494 contents: lsp::HoverContents::Scalar(
6495 lsp::MarkedString::String(format!("{name} hover")),
6496 ),
6497 range: None,
6498 }))
6499 }
6500 },
6501 ),
6502 );
6503 }
6504 "ESLintServer" => {
6505 servers_with_hover_requests.insert(
6506 new_server_name,
6507 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6508 |_, _| async move { Ok(None) },
6509 ),
6510 );
6511 }
6512 "NoHoverCapabilitiesServer" => {
6513 let _never_handled = new_server
6514 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6515 panic!(
6516 "Should not call for hovers server with no corresponding capabilities"
6517 )
6518 });
6519 }
6520 unexpected => panic!("Unexpected server name: {unexpected}"),
6521 }
6522 }
6523
6524 let hover_task = project.update(cx, |project, cx| {
6525 project.hover(&buffer, Point::new(0, 0), cx)
6526 });
6527 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6528 |mut hover_request| async move {
6529 hover_request
6530 .next()
6531 .await
6532 .expect("All hover requests should have been triggered")
6533 },
6534 ))
6535 .await;
6536 assert_eq!(
6537 vec!["TailwindServer hover", "TypeScriptServer hover"],
6538 hover_task
6539 .await
6540 .into_iter()
6541 .flatten()
6542 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6543 .sorted()
6544 .collect::<Vec<_>>(),
6545 "Should receive hover responses from all related servers with hover capabilities"
6546 );
6547}
6548
6549#[gpui::test]
6550async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6551 init_test(cx);
6552
6553 let fs = FakeFs::new(cx.executor());
6554 fs.insert_tree(
6555 path!("/dir"),
6556 json!({
6557 "a.ts": "a",
6558 }),
6559 )
6560 .await;
6561
6562 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6563
6564 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6565 language_registry.add(typescript_lang());
6566 let mut fake_language_servers = language_registry.register_fake_lsp(
6567 "TypeScript",
6568 FakeLspAdapter {
6569 capabilities: lsp::ServerCapabilities {
6570 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6571 ..lsp::ServerCapabilities::default()
6572 },
6573 ..FakeLspAdapter::default()
6574 },
6575 );
6576
6577 let (buffer, _handle) = project
6578 .update(cx, |p, cx| {
6579 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6580 })
6581 .await
6582 .unwrap();
6583 cx.executor().run_until_parked();
6584
6585 let fake_server = fake_language_servers
6586 .next()
6587 .await
6588 .expect("failed to get the language server");
6589
6590 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6591 move |_, _| async move {
6592 Ok(Some(lsp::Hover {
6593 contents: lsp::HoverContents::Array(vec![
6594 lsp::MarkedString::String("".to_string()),
6595 lsp::MarkedString::String(" ".to_string()),
6596 lsp::MarkedString::String("\n\n\n".to_string()),
6597 ]),
6598 range: None,
6599 }))
6600 },
6601 );
6602
6603 let hover_task = project.update(cx, |project, cx| {
6604 project.hover(&buffer, Point::new(0, 0), cx)
6605 });
6606 let () = request_handled
6607 .next()
6608 .await
6609 .expect("All hover requests should have been triggered");
6610 assert_eq!(
6611 Vec::<String>::new(),
6612 hover_task
6613 .await
6614 .into_iter()
6615 .flatten()
6616 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6617 .sorted()
6618 .collect::<Vec<_>>(),
6619 "Empty hover parts should be ignored"
6620 );
6621}
6622
6623#[gpui::test]
6624async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6625 init_test(cx);
6626
6627 let fs = FakeFs::new(cx.executor());
6628 fs.insert_tree(
6629 path!("/dir"),
6630 json!({
6631 "a.ts": "a",
6632 }),
6633 )
6634 .await;
6635
6636 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6637
6638 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6639 language_registry.add(typescript_lang());
6640 let mut fake_language_servers = language_registry.register_fake_lsp(
6641 "TypeScript",
6642 FakeLspAdapter {
6643 capabilities: lsp::ServerCapabilities {
6644 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6645 ..lsp::ServerCapabilities::default()
6646 },
6647 ..FakeLspAdapter::default()
6648 },
6649 );
6650
6651 let (buffer, _handle) = project
6652 .update(cx, |p, cx| {
6653 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6654 })
6655 .await
6656 .unwrap();
6657 cx.executor().run_until_parked();
6658
6659 let fake_server = fake_language_servers
6660 .next()
6661 .await
6662 .expect("failed to get the language server");
6663
6664 let mut request_handled = fake_server
6665 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6666 Ok(Some(vec![
6667 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6668 title: "organize imports".to_string(),
6669 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6670 ..lsp::CodeAction::default()
6671 }),
6672 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6673 title: "fix code".to_string(),
6674 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6675 ..lsp::CodeAction::default()
6676 }),
6677 ]))
6678 });
6679
6680 let code_actions_task = project.update(cx, |project, cx| {
6681 project.code_actions(
6682 &buffer,
6683 0..buffer.read(cx).len(),
6684 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6685 cx,
6686 )
6687 });
6688
6689 let () = request_handled
6690 .next()
6691 .await
6692 .expect("The code action request should have been triggered");
6693
6694 let code_actions = code_actions_task.await.unwrap().unwrap();
6695 assert_eq!(code_actions.len(), 1);
6696 assert_eq!(
6697 code_actions[0].lsp_action.action_kind(),
6698 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6699 );
6700}
6701
6702#[gpui::test]
6703async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6704 init_test(cx);
6705
6706 let fs = FakeFs::new(cx.executor());
6707 fs.insert_tree(
6708 path!("/dir"),
6709 json!({
6710 "a.tsx": "a",
6711 }),
6712 )
6713 .await;
6714
6715 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6716
6717 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6718 language_registry.add(tsx_lang());
6719 let language_server_names = [
6720 "TypeScriptServer",
6721 "TailwindServer",
6722 "ESLintServer",
6723 "NoActionsCapabilitiesServer",
6724 ];
6725
6726 let mut language_server_rxs = [
6727 language_registry.register_fake_lsp(
6728 "tsx",
6729 FakeLspAdapter {
6730 name: language_server_names[0],
6731 capabilities: lsp::ServerCapabilities {
6732 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6733 ..lsp::ServerCapabilities::default()
6734 },
6735 ..FakeLspAdapter::default()
6736 },
6737 ),
6738 language_registry.register_fake_lsp(
6739 "tsx",
6740 FakeLspAdapter {
6741 name: language_server_names[1],
6742 capabilities: lsp::ServerCapabilities {
6743 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6744 ..lsp::ServerCapabilities::default()
6745 },
6746 ..FakeLspAdapter::default()
6747 },
6748 ),
6749 language_registry.register_fake_lsp(
6750 "tsx",
6751 FakeLspAdapter {
6752 name: language_server_names[2],
6753 capabilities: lsp::ServerCapabilities {
6754 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6755 ..lsp::ServerCapabilities::default()
6756 },
6757 ..FakeLspAdapter::default()
6758 },
6759 ),
6760 language_registry.register_fake_lsp(
6761 "tsx",
6762 FakeLspAdapter {
6763 name: language_server_names[3],
6764 capabilities: lsp::ServerCapabilities {
6765 code_action_provider: None,
6766 ..lsp::ServerCapabilities::default()
6767 },
6768 ..FakeLspAdapter::default()
6769 },
6770 ),
6771 ];
6772
6773 let (buffer, _handle) = project
6774 .update(cx, |p, cx| {
6775 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6776 })
6777 .await
6778 .unwrap();
6779 cx.executor().run_until_parked();
6780
6781 let mut servers_with_actions_requests = HashMap::default();
6782 for i in 0..language_server_names.len() {
6783 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6784 panic!(
6785 "Failed to get language server #{i} with name {}",
6786 &language_server_names[i]
6787 )
6788 });
6789 let new_server_name = new_server.server.name();
6790
6791 assert!(
6792 !servers_with_actions_requests.contains_key(&new_server_name),
6793 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6794 );
6795 match new_server_name.0.as_ref() {
6796 "TailwindServer" | "TypeScriptServer" => {
6797 servers_with_actions_requests.insert(
6798 new_server_name.clone(),
6799 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6800 move |_, _| {
6801 let name = new_server_name.clone();
6802 async move {
6803 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6804 lsp::CodeAction {
6805 title: format!("{name} code action"),
6806 ..lsp::CodeAction::default()
6807 },
6808 )]))
6809 }
6810 },
6811 ),
6812 );
6813 }
6814 "ESLintServer" => {
6815 servers_with_actions_requests.insert(
6816 new_server_name,
6817 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6818 |_, _| async move { Ok(None) },
6819 ),
6820 );
6821 }
6822 "NoActionsCapabilitiesServer" => {
6823 let _never_handled = new_server
6824 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6825 panic!(
6826 "Should not call for code actions server with no corresponding capabilities"
6827 )
6828 });
6829 }
6830 unexpected => panic!("Unexpected server name: {unexpected}"),
6831 }
6832 }
6833
6834 let code_actions_task = project.update(cx, |project, cx| {
6835 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6836 });
6837
6838 // cx.run_until_parked();
6839 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6840 |mut code_actions_request| async move {
6841 code_actions_request
6842 .next()
6843 .await
6844 .expect("All code actions requests should have been triggered")
6845 },
6846 ))
6847 .await;
6848 assert_eq!(
6849 vec!["TailwindServer code action", "TypeScriptServer code action"],
6850 code_actions_task
6851 .await
6852 .unwrap()
6853 .unwrap()
6854 .into_iter()
6855 .map(|code_action| code_action.lsp_action.title().to_owned())
6856 .sorted()
6857 .collect::<Vec<_>>(),
6858 "Should receive code actions responses from all related servers with hover capabilities"
6859 );
6860}
6861
6862#[gpui::test]
6863async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6864 init_test(cx);
6865
6866 let fs = FakeFs::new(cx.executor());
6867 fs.insert_tree(
6868 "/dir",
6869 json!({
6870 "a.rs": "let a = 1;",
6871 "b.rs": "let b = 2;",
6872 "c.rs": "let c = 2;",
6873 }),
6874 )
6875 .await;
6876
6877 let project = Project::test(
6878 fs,
6879 [
6880 "/dir/a.rs".as_ref(),
6881 "/dir/b.rs".as_ref(),
6882 "/dir/c.rs".as_ref(),
6883 ],
6884 cx,
6885 )
6886 .await;
6887
6888 // check the initial state and get the worktrees
6889 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6890 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6891 assert_eq!(worktrees.len(), 3);
6892
6893 let worktree_a = worktrees[0].read(cx);
6894 let worktree_b = worktrees[1].read(cx);
6895 let worktree_c = worktrees[2].read(cx);
6896
6897 // check they start in the right order
6898 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6899 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6900 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6901
6902 (
6903 worktrees[0].clone(),
6904 worktrees[1].clone(),
6905 worktrees[2].clone(),
6906 )
6907 });
6908
6909 // move first worktree to after the second
6910 // [a, b, c] -> [b, a, c]
6911 project
6912 .update(cx, |project, cx| {
6913 let first = worktree_a.read(cx);
6914 let second = worktree_b.read(cx);
6915 project.move_worktree(first.id(), second.id(), cx)
6916 })
6917 .expect("moving first after second");
6918
6919 // check the state after moving
6920 project.update(cx, |project, cx| {
6921 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6922 assert_eq!(worktrees.len(), 3);
6923
6924 let first = worktrees[0].read(cx);
6925 let second = worktrees[1].read(cx);
6926 let third = worktrees[2].read(cx);
6927
6928 // check they are now in the right order
6929 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6930 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6931 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6932 });
6933
6934 // move the second worktree to before the first
6935 // [b, a, c] -> [a, b, c]
6936 project
6937 .update(cx, |project, cx| {
6938 let second = worktree_a.read(cx);
6939 let first = worktree_b.read(cx);
6940 project.move_worktree(first.id(), second.id(), cx)
6941 })
6942 .expect("moving second before first");
6943
6944 // check the state after moving
6945 project.update(cx, |project, cx| {
6946 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6947 assert_eq!(worktrees.len(), 3);
6948
6949 let first = worktrees[0].read(cx);
6950 let second = worktrees[1].read(cx);
6951 let third = worktrees[2].read(cx);
6952
6953 // check they are now in the right order
6954 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6955 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6956 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6957 });
6958
6959 // move the second worktree to after the third
6960 // [a, b, c] -> [a, c, b]
6961 project
6962 .update(cx, |project, cx| {
6963 let second = worktree_b.read(cx);
6964 let third = worktree_c.read(cx);
6965 project.move_worktree(second.id(), third.id(), cx)
6966 })
6967 .expect("moving second after third");
6968
6969 // check the state after moving
6970 project.update(cx, |project, cx| {
6971 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6972 assert_eq!(worktrees.len(), 3);
6973
6974 let first = worktrees[0].read(cx);
6975 let second = worktrees[1].read(cx);
6976 let third = worktrees[2].read(cx);
6977
6978 // check they are now in the right order
6979 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6980 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6981 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6982 });
6983
6984 // move the third worktree to before the second
6985 // [a, c, b] -> [a, b, c]
6986 project
6987 .update(cx, |project, cx| {
6988 let third = worktree_c.read(cx);
6989 let second = worktree_b.read(cx);
6990 project.move_worktree(third.id(), second.id(), cx)
6991 })
6992 .expect("moving third before second");
6993
6994 // check the state after moving
6995 project.update(cx, |project, cx| {
6996 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6997 assert_eq!(worktrees.len(), 3);
6998
6999 let first = worktrees[0].read(cx);
7000 let second = worktrees[1].read(cx);
7001 let third = worktrees[2].read(cx);
7002
7003 // check they are now in the right order
7004 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7005 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7006 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7007 });
7008
7009 // move the first worktree to after the third
7010 // [a, b, c] -> [b, c, a]
7011 project
7012 .update(cx, |project, cx| {
7013 let first = worktree_a.read(cx);
7014 let third = worktree_c.read(cx);
7015 project.move_worktree(first.id(), third.id(), cx)
7016 })
7017 .expect("moving first after third");
7018
7019 // check the state after moving
7020 project.update(cx, |project, cx| {
7021 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7022 assert_eq!(worktrees.len(), 3);
7023
7024 let first = worktrees[0].read(cx);
7025 let second = worktrees[1].read(cx);
7026 let third = worktrees[2].read(cx);
7027
7028 // check they are now in the right order
7029 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7030 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7031 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7032 });
7033
7034 // move the third worktree to before the first
7035 // [b, c, a] -> [a, b, c]
7036 project
7037 .update(cx, |project, cx| {
7038 let third = worktree_a.read(cx);
7039 let first = worktree_b.read(cx);
7040 project.move_worktree(third.id(), first.id(), cx)
7041 })
7042 .expect("moving third before first");
7043
7044 // check the state after moving
7045 project.update(cx, |project, cx| {
7046 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7047 assert_eq!(worktrees.len(), 3);
7048
7049 let first = worktrees[0].read(cx);
7050 let second = worktrees[1].read(cx);
7051 let third = worktrees[2].read(cx);
7052
7053 // check they are now in the right order
7054 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7055 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7056 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7057 });
7058}
7059
7060#[gpui::test]
7061async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7062 init_test(cx);
7063
7064 let staged_contents = r#"
7065 fn main() {
7066 println!("hello world");
7067 }
7068 "#
7069 .unindent();
7070 let file_contents = r#"
7071 // print goodbye
7072 fn main() {
7073 println!("goodbye world");
7074 }
7075 "#
7076 .unindent();
7077
7078 let fs = FakeFs::new(cx.background_executor.clone());
7079 fs.insert_tree(
7080 "/dir",
7081 json!({
7082 ".git": {},
7083 "src": {
7084 "main.rs": file_contents,
7085 }
7086 }),
7087 )
7088 .await;
7089
7090 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7091
7092 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7093
7094 let buffer = project
7095 .update(cx, |project, cx| {
7096 project.open_local_buffer("/dir/src/main.rs", cx)
7097 })
7098 .await
7099 .unwrap();
7100 let unstaged_diff = project
7101 .update(cx, |project, cx| {
7102 project.open_unstaged_diff(buffer.clone(), cx)
7103 })
7104 .await
7105 .unwrap();
7106
7107 cx.run_until_parked();
7108 unstaged_diff.update(cx, |unstaged_diff, cx| {
7109 let snapshot = buffer.read(cx).snapshot();
7110 assert_hunks(
7111 unstaged_diff.hunks(&snapshot, cx),
7112 &snapshot,
7113 &unstaged_diff.base_text_string().unwrap(),
7114 &[
7115 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7116 (
7117 2..3,
7118 " println!(\"hello world\");\n",
7119 " println!(\"goodbye world\");\n",
7120 DiffHunkStatus::modified_none(),
7121 ),
7122 ],
7123 );
7124 });
7125
7126 let staged_contents = r#"
7127 // print goodbye
7128 fn main() {
7129 }
7130 "#
7131 .unindent();
7132
7133 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7134
7135 cx.run_until_parked();
7136 unstaged_diff.update(cx, |unstaged_diff, cx| {
7137 let snapshot = buffer.read(cx).snapshot();
7138 assert_hunks(
7139 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7140 &snapshot,
7141 &unstaged_diff.base_text().text(),
7142 &[(
7143 2..3,
7144 "",
7145 " println!(\"goodbye world\");\n",
7146 DiffHunkStatus::added_none(),
7147 )],
7148 );
7149 });
7150}
7151
// Verifies the uncommitted diff (working copy vs. HEAD) for buffers:
// - secondary (unstaged) hunk statuses for changes that are / aren't in the index,
// - recomputation when HEAD moves,
// - representation of a file that exists in HEAD/index but not the worktree,
// - status change when that file's deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three layers of content for src/modification.rs: HEAD, index, worktree.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain src/deletion.rs, which is absent from
    // the worktree, so it will appear as a deleted file.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD content) should pick up the registered
    // Rust language for syntax highlighting.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line exists only in the worktree (unstaged), so it
    // carries a secondary hunk; the println change is already in the index,
    // so its secondary status is "none".
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the deletion is not yet
    // staged, so it still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (rewrite the index without deletion.rs).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7331
// Exercises interactive staging of individual hunks through the uncommitted
// diff: the optimistic "pending" secondary statuses shown before the index
// write lands, the events the diff emits along the way, and rollback of the
// optimistic state when the index write fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index start out identical; the working copy deletes
    // "zero" and rewrites "two" and "four", yielding three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // NOTE(review): uses path!() here but bare "/dir" literals elsewhere in
    // this test — equivalent on unix; confirm if this is meant to run on
    // windows, where path!() rewrites the prefix.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so the emitted events can be
    // asserted in order below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the hunk is shown as
        // pending removal of its secondary (unstaged) hunk.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. Optimistically shown as pending, like before.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback also arrives as a diff-changed event.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7671
// Like test_staging_hunks, but pauses the fake filesystem's event delivery so
// that later staging operations happen while earlier index-write FS events
// are still in flight. The pinned seeds previously reproduced a race between
// staging and diff recalculation.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as test_staging_hunks: three hunks (one deletion, two
    // modifications) between the working copy and the committed contents.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending since no FS event has been
    // delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are still pending — their FS events are buffered.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7865
// Randomized stress test: repeatedly stage or unstage random hunks — with the
// diff-recalculation task sometimes deprioritized to provoke races against
// index writes — then verify that every hunk settles into the secondary
// status predicted by a locally-tracked model of the hunk list.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Thirty lines; every fifth line is modified in the buffer, yielding six
    // modification hunks against the (identical) HEAD and index contents.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    // Keep a handle on the repo so the final index text can be logged.
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Local copy of the hunks — acts as the model whose secondary statuses
    // are updated alongside each staging operation.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the chosen hunk and record the pending status it should be in.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so operations interleave with IO.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending transition should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    // The real diff's hunks must match the model exactly (row + status).
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7988
7989#[gpui::test]
7990async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7991 init_test(cx);
7992
7993 let committed_contents = r#"
7994 fn main() {
7995 println!("hello from HEAD");
7996 }
7997 "#
7998 .unindent();
7999 let file_contents = r#"
8000 fn main() {
8001 println!("hello from the working copy");
8002 }
8003 "#
8004 .unindent();
8005
8006 let fs = FakeFs::new(cx.background_executor.clone());
8007 fs.insert_tree(
8008 "/dir",
8009 json!({
8010 ".git": {},
8011 "src": {
8012 "main.rs": file_contents,
8013 }
8014 }),
8015 )
8016 .await;
8017
8018 fs.set_head_for_repo(
8019 Path::new("/dir/.git"),
8020 &[("src/main.rs", committed_contents.clone())],
8021 "deadbeef",
8022 );
8023 fs.set_index_for_repo(
8024 Path::new("/dir/.git"),
8025 &[("src/main.rs", committed_contents.clone())],
8026 );
8027
8028 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8029
8030 let buffer = project
8031 .update(cx, |project, cx| {
8032 project.open_local_buffer("/dir/src/main.rs", cx)
8033 })
8034 .await
8035 .unwrap();
8036 let uncommitted_diff = project
8037 .update(cx, |project, cx| {
8038 project.open_uncommitted_diff(buffer.clone(), cx)
8039 })
8040 .await
8041 .unwrap();
8042
8043 cx.run_until_parked();
8044 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8045 let snapshot = buffer.read(cx).snapshot();
8046 assert_hunks(
8047 uncommitted_diff.hunks(&snapshot, cx),
8048 &snapshot,
8049 &uncommitted_diff.base_text_string().unwrap(),
8050 &[(
8051 1..2,
8052 " println!(\"hello from HEAD\");\n",
8053 " println!(\"hello from the working copy\");\n",
8054 DiffHunkStatus {
8055 kind: DiffHunkStatusKind::Modified,
8056 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8057 },
8058 )],
8059 );
8060 });
8061}
8062
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that `GitStore::repository_and_path_for_project_path` resolves a
    // project path to the *innermost* containing repository (nested repos
    // shadow their parents), returns `None` for paths outside any repository,
    // and drops the mapping once the repository's `.git` directory is removed.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repository, and `dir1/deps/dep1` is a nested repository
    // inside it; `c.txt` lives outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for git scanning to finish so repository mappings are populated.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo work dir, repo-relative path)).
        // `c.txt` is outside any repo; `a.txt` must resolve to the nested
        // `dep1` repo, not the outer `dir1` repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's `.git` directory should remove its mapping.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8152
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    // Verifies the special-casing of the home directory: when a worktree is a
    // *subfolder* of the home-dir repository, that repository is not picked up;
    // but when the worktree is the home directory itself, the repository is
    // recognized normally.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    // The home directory itself is a git repository containing `project/`.
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: open only `~/project` — the home-dir repo must NOT be detected.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: open the home directory itself — the repo IS detected, and its
    // work directory is the home directory.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8210
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // Exercises cached git statuses against a real git repository (RealFs):
    // initial scan picks up modified/untracked/deleted entries; subsequent
    // file edits, commits, and deletions update the cached statuses, and
    // unchanged files never appear in the status list.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a deleted and a modified entry relative to the commit.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and so has no status entry.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged file; it should gain a Modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the pending changes (and drop d.txt from the index) so the
    // status list is clean again, modulo files changed afterwards.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8340
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Verifies two pieces of status post-processing:
    // 1. a nested git repository (`sub`) is excluded from the outer repo's
    //    computed statuses, and
    // 2. a file deleted in the index but present in the working copy shows a
    //    combined "DA" status (index: Deleted, worktree: Added).
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are repositories; select the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8403
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that when the opened worktree is a *subfolder* of a repository,
    // the repository is still discovered (with the correct work directory
    // above the worktree root) and per-path statuses are reported and updated.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the deeply-nested subfolder, not the repo root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory is the repo root, above the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should clear the cached statuses too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8483
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): the whole test is compiled out via `#[cfg(any())]` (an
// always-false predicate) until the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Simulates a conflicting cherry-pick in a real repository and verifies
    // that the repository's `merge_conflicts` set gains the conflicted path,
    // then empties again once the cherry-pick is resolved and committed.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // On a side branch, change a.txt to "A" and remember that commit.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Back on main, make a conflicting change to the same file, then
    // cherry-pick the side-branch commit to provoke a conflict.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8566
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that rewriting `.gitignore` at runtime flips which entries are
    // ignored, and that a newly non-ignored file staged in the index shows the
    // expected Added status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // Initially `*.txt` is ignored, so `b.txt` is ignored and `a.xml` is not.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored flags have swapped, and b.txt is now staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8634
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a repository's work directory on disk updates
    // `work_directory_abs_path` while preserving the cached per-path statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, `a` modified, `b` untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8716
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status tracking against a real repository: initial untracked
    // files, working-copy edits, commits, resets/stash, gitignore updates,
    // directory creation, and directory renames must all be reflected in
    // `status_for_path`.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new ignore file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory with an untracked file in it.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8945
8946#[gpui::test]
8947#[ignore]
8948async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8949 init_test(cx);
8950 cx.executor().allow_parking();
8951
8952 const IGNORE_RULE: &str = "**/target";
8953
8954 let root = TempTree::new(json!({
8955 "project": {
8956 "src": {
8957 "main.rs": "fn main() {}"
8958 },
8959 "target": {
8960 "debug": {
8961 "important_text.txt": "important text",
8962 },
8963 },
8964 ".gitignore": IGNORE_RULE
8965 },
8966
8967 }));
8968 let root_path = root.path();
8969
8970 // Set up git repository before creating the worktree.
8971 let work_dir = root.path().join("project");
8972 let repo = git_init(work_dir.as_path());
8973 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8974 git_add("src/main.rs", &repo);
8975 git_add(".gitignore", &repo);
8976 git_commit("Initial commit", &repo);
8977
8978 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8979 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8980 let project_events = Arc::new(Mutex::new(Vec::new()));
8981 project.update(cx, |project, cx| {
8982 let repo_events = repository_updates.clone();
8983 cx.subscribe(project.git_store(), move |_, _, e, _| {
8984 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8985 repo_events.lock().push(e.clone());
8986 }
8987 })
8988 .detach();
8989 let project_events = project_events.clone();
8990 cx.subscribe_self(move |_, e, _| {
8991 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8992 project_events.lock().extend(
8993 updates
8994 .iter()
8995 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8996 .filter(|(path, _)| path != "fs-event-sentinel"),
8997 );
8998 }
8999 })
9000 .detach();
9001 });
9002
9003 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9004 tree.flush_fs_events(cx).await;
9005 tree.update(cx, |tree, cx| {
9006 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
9007 })
9008 .await
9009 .unwrap();
9010 tree.update(cx, |tree, _| {
9011 assert_eq!(
9012 tree.entries(true, 0)
9013 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9014 .collect::<Vec<_>>(),
9015 vec![
9016 (rel_path(""), false),
9017 (rel_path("project/"), false),
9018 (rel_path("project/.gitignore"), false),
9019 (rel_path("project/src"), false),
9020 (rel_path("project/src/main.rs"), false),
9021 (rel_path("project/target"), true),
9022 (rel_path("project/target/debug"), true),
9023 (rel_path("project/target/debug/important_text.txt"), true),
9024 ]
9025 );
9026 });
9027
9028 assert_eq!(
9029 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9030 vec![
9031 RepositoryEvent::StatusesChanged { full_scan: true },
9032 RepositoryEvent::MergeHeadsChanged,
9033 ],
9034 "Initial worktree scan should produce a repo update event"
9035 );
9036 assert_eq!(
9037 project_events.lock().drain(..).collect::<Vec<_>>(),
9038 vec![
9039 ("project/target".to_string(), PathChange::Loaded),
9040 ("project/target/debug".to_string(), PathChange::Loaded),
9041 (
9042 "project/target/debug/important_text.txt".to_string(),
9043 PathChange::Loaded
9044 ),
9045 ],
9046 "Initial project changes should show that all not-ignored and all opened files are loaded"
9047 );
9048
9049 let deps_dir = work_dir.join("target").join("debug").join("deps");
9050 std::fs::create_dir_all(&deps_dir).unwrap();
9051 tree.flush_fs_events(cx).await;
9052 project
9053 .update(cx, |project, cx| project.git_scans_complete(cx))
9054 .await;
9055 cx.executor().run_until_parked();
9056 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9057 tree.flush_fs_events(cx).await;
9058 project
9059 .update(cx, |project, cx| project.git_scans_complete(cx))
9060 .await;
9061 cx.executor().run_until_parked();
9062 std::fs::remove_dir_all(&deps_dir).unwrap();
9063 tree.flush_fs_events(cx).await;
9064 project
9065 .update(cx, |project, cx| project.git_scans_complete(cx))
9066 .await;
9067 cx.executor().run_until_parked();
9068
9069 tree.update(cx, |tree, _| {
9070 assert_eq!(
9071 tree.entries(true, 0)
9072 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9073 .collect::<Vec<_>>(),
9074 vec![
9075 (rel_path(""), false),
9076 (rel_path("project/"), false),
9077 (rel_path("project/.gitignore"), false),
9078 (rel_path("project/src"), false),
9079 (rel_path("project/src/main.rs"), false),
9080 (rel_path("project/target"), true),
9081 (rel_path("project/target/debug"), true),
9082 (rel_path("project/target/debug/important_text.txt"), true),
9083 ],
9084 "No stray temp files should be left after the flycheck changes"
9085 );
9086 });
9087
9088 assert_eq!(
9089 repository_updates
9090 .lock()
9091 .iter()
9092 .cloned()
9093 .collect::<Vec<_>>(),
9094 Vec::new(),
9095 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9096 );
9097 assert_eq!(
9098 project_events.lock().as_slice(),
9099 vec![
9100 ("project/target/debug/deps".to_string(), PathChange::Added),
9101 ("project/target/debug/deps".to_string(), PathChange::Removed),
9102 ],
9103 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9104 No updates for more nested directories should happen as those are ignored",
9105 );
9106}
9107
9108#[gpui::test]
9109async fn test_odd_events_for_ignored_dirs(
9110 executor: BackgroundExecutor,
9111 cx: &mut gpui::TestAppContext,
9112) {
9113 init_test(cx);
9114 let fs = FakeFs::new(executor);
9115 fs.insert_tree(
9116 path!("/root"),
9117 json!({
9118 ".git": {},
9119 ".gitignore": "**/target/",
9120 "src": {
9121 "main.rs": "fn main() {}",
9122 },
9123 "target": {
9124 "debug": {
9125 "foo.txt": "foo",
9126 "deps": {}
9127 }
9128 }
9129 }),
9130 )
9131 .await;
9132 fs.set_head_and_index_for_repo(
9133 path!("/root/.git").as_ref(),
9134 &[
9135 (".gitignore", "**/target/".into()),
9136 ("src/main.rs", "fn main() {}".into()),
9137 ],
9138 );
9139
9140 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9141 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9142 let project_events = Arc::new(Mutex::new(Vec::new()));
9143 project.update(cx, |project, cx| {
9144 let repository_updates = repository_updates.clone();
9145 cx.subscribe(project.git_store(), move |_, _, e, _| {
9146 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9147 repository_updates.lock().push(e.clone());
9148 }
9149 })
9150 .detach();
9151 let project_events = project_events.clone();
9152 cx.subscribe_self(move |_, e, _| {
9153 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9154 project_events.lock().extend(
9155 updates
9156 .iter()
9157 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9158 .filter(|(path, _)| path != "fs-event-sentinel"),
9159 );
9160 }
9161 })
9162 .detach();
9163 });
9164
9165 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9166 tree.update(cx, |tree, cx| {
9167 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9168 })
9169 .await
9170 .unwrap();
9171 tree.flush_fs_events(cx).await;
9172 project
9173 .update(cx, |project, cx| project.git_scans_complete(cx))
9174 .await;
9175 cx.run_until_parked();
9176 tree.update(cx, |tree, _| {
9177 assert_eq!(
9178 tree.entries(true, 0)
9179 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9180 .collect::<Vec<_>>(),
9181 vec![
9182 (rel_path(""), false),
9183 (rel_path(".gitignore"), false),
9184 (rel_path("src"), false),
9185 (rel_path("src/main.rs"), false),
9186 (rel_path("target"), true),
9187 (rel_path("target/debug"), true),
9188 (rel_path("target/debug/deps"), true),
9189 (rel_path("target/debug/foo.txt"), true),
9190 ]
9191 );
9192 });
9193
9194 assert_eq!(
9195 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9196 vec![
9197 RepositoryEvent::MergeHeadsChanged,
9198 RepositoryEvent::BranchChanged,
9199 RepositoryEvent::StatusesChanged { full_scan: false },
9200 RepositoryEvent::StatusesChanged { full_scan: false },
9201 ],
9202 "Initial worktree scan should produce a repo update event"
9203 );
9204 assert_eq!(
9205 project_events.lock().drain(..).collect::<Vec<_>>(),
9206 vec![
9207 ("target".to_string(), PathChange::Loaded),
9208 ("target/debug".to_string(), PathChange::Loaded),
9209 ("target/debug/deps".to_string(), PathChange::Loaded),
9210 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9211 ],
9212 "All non-ignored entries and all opened firs should be getting a project event",
9213 );
9214
9215 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9216 // This may happen multiple times during a single flycheck, but once is enough for testing.
9217 fs.emit_fs_event("/root/target/debug/deps", None);
9218 tree.flush_fs_events(cx).await;
9219 project
9220 .update(cx, |project, cx| project.git_scans_complete(cx))
9221 .await;
9222 cx.executor().run_until_parked();
9223
9224 assert_eq!(
9225 repository_updates
9226 .lock()
9227 .iter()
9228 .cloned()
9229 .collect::<Vec<_>>(),
9230 Vec::new(),
9231 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9232 );
9233 assert_eq!(
9234 project_events.lock().as_slice(),
9235 Vec::new(),
9236 "No further project events should happen, as only ignored dirs received FS events",
9237 );
9238}
9239
9240#[gpui::test]
9241async fn test_repos_in_invisible_worktrees(
9242 executor: BackgroundExecutor,
9243 cx: &mut gpui::TestAppContext,
9244) {
9245 init_test(cx);
9246 let fs = FakeFs::new(executor);
9247 fs.insert_tree(
9248 path!("/root"),
9249 json!({
9250 "dir1": {
9251 ".git": {},
9252 "dep1": {
9253 ".git": {},
9254 "src": {
9255 "a.txt": "",
9256 },
9257 },
9258 "b.txt": "",
9259 },
9260 }),
9261 )
9262 .await;
9263
9264 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
9265 let _visible_worktree =
9266 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9267 project
9268 .update(cx, |project, cx| project.git_scans_complete(cx))
9269 .await;
9270
9271 let repos = project.read_with(cx, |project, cx| {
9272 project
9273 .repositories(cx)
9274 .values()
9275 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9276 .collect::<Vec<_>>()
9277 });
9278 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9279
9280 let (_invisible_worktree, _) = project
9281 .update(cx, |project, cx| {
9282 project.worktree_store.update(cx, |worktree_store, cx| {
9283 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
9284 })
9285 })
9286 .await
9287 .expect("failed to create worktree");
9288 project
9289 .update(cx, |project, cx| project.git_scans_complete(cx))
9290 .await;
9291
9292 let repos = project.read_with(cx, |project, cx| {
9293 project
9294 .repositories(cx)
9295 .values()
9296 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9297 .collect::<Vec<_>>()
9298 });
9299 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9300}
9301
/// Verifies that rescans keep git status and `is_ignored` flags correct for
/// tracked files, files ignored by an ancestor `.gitignore`, and files inside
/// a directory ignored by the repo's own `.gitignore`.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so the scanner sees everything, including `.git`.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // A `.gitignore` in the repo's *ancestor* directory ignores the
    // `ancestor-ignored-*` files; the repo's own `.gitignore` ignores `ignored-dir`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force-load `ignored-dir`'s
    // entries so the git-state assertions below can see them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: clean tracked file (no status) and ignored files with no status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new files in both ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file shows as Added; files in ignored
    // locations still carry no git status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is marked ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9442
/// Verifies that linked git worktrees (a `.git` *file* pointing into
/// `<repo>/.git/worktrees/...`) and submodules (a `.git` file pointing into
/// `<repo>/.git/modules/...`) are each discovered as separate repositories,
/// and that git state changes inside them are picked up.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository,
    // not the outer repo.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index contain "b" while the file on disk is "B" => modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9598
9599#[gpui::test]
9600async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
9601 init_test(cx);
9602 let fs = FakeFs::new(cx.background_executor.clone());
9603 fs.insert_tree(
9604 path!("/root"),
9605 json!({
9606 "project": {
9607 ".git": {},
9608 "child1": {
9609 "a.txt": "A",
9610 },
9611 "child2": {
9612 "b.txt": "B",
9613 }
9614 }
9615 }),
9616 )
9617 .await;
9618
9619 let project = Project::test(
9620 fs.clone(),
9621 [
9622 path!("/root/project/child1").as_ref(),
9623 path!("/root/project/child2").as_ref(),
9624 ],
9625 cx,
9626 )
9627 .await;
9628
9629 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9630 tree.flush_fs_events(cx).await;
9631 project
9632 .update(cx, |project, cx| project.git_scans_complete(cx))
9633 .await;
9634 cx.executor().run_until_parked();
9635
9636 let repos = project.read_with(cx, |project, cx| {
9637 project
9638 .repositories(cx)
9639 .values()
9640 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9641 .collect::<Vec<_>>()
9642 });
9643 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
9644}
9645
9646async fn search(
9647 project: &Entity<Project>,
9648 query: SearchQuery,
9649 cx: &mut gpui::TestAppContext,
9650) -> Result<HashMap<String, Vec<Range<usize>>>> {
9651 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9652 let mut results = HashMap::default();
9653 while let Ok(search_result) = search_rx.recv().await {
9654 match search_result {
9655 SearchResult::Buffer { buffer, ranges } => {
9656 results.entry(buffer).or_insert(ranges);
9657 }
9658 SearchResult::LimitReached => {}
9659 }
9660 }
9661 Ok(results
9662 .into_iter()
9663 .map(|(buffer, ranges)| {
9664 buffer.update(cx, |buffer, cx| {
9665 let path = buffer
9666 .file()
9667 .unwrap()
9668 .full_path(cx)
9669 .to_string_lossy()
9670 .to_string();
9671 let ranges = ranges
9672 .into_iter()
9673 .map(|range| range.to_offset(buffer))
9674 .collect::<Vec<_>>();
9675 (path, ranges)
9676 })
9677 })
9678 .collect())
9679}
9680
/// Shared setup for every test in this module: installs a test settings
/// store and initializes the release-channel, language, and project globals.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store is installed before the other `init`s —
        // presumably they read settings during initialization; keep this order.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9692
9693fn json_lang() -> Arc<Language> {
9694 Arc::new(Language::new(
9695 LanguageConfig {
9696 name: "JSON".into(),
9697 matcher: LanguageMatcher {
9698 path_suffixes: vec!["json".to_string()],
9699 ..Default::default()
9700 },
9701 ..Default::default()
9702 },
9703 None,
9704 ))
9705}
9706
9707fn js_lang() -> Arc<Language> {
9708 Arc::new(Language::new(
9709 LanguageConfig {
9710 name: "JavaScript".into(),
9711 matcher: LanguageMatcher {
9712 path_suffixes: vec!["js".to_string()],
9713 ..Default::default()
9714 },
9715 ..Default::default()
9716 },
9717 None,
9718 ))
9719}
9720
9721fn rust_lang() -> Arc<Language> {
9722 Arc::new(Language::new(
9723 LanguageConfig {
9724 name: "Rust".into(),
9725 matcher: LanguageMatcher {
9726 path_suffixes: vec!["rs".to_string()],
9727 ..Default::default()
9728 },
9729 ..Default::default()
9730 },
9731 Some(tree_sitter_rust::LANGUAGE.into()),
9732 ))
9733}
9734
/// Builds a fake "Python" language (no grammar) whose toolchain lister
/// reports a "Python Venv" toolchain for every ancestor directory that
/// contains a `.venv` folder on the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Probe `<worktree_root>/<ancestor>/.venv` on the fake fs.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No shell activation needed for the fake toolchain.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9804
9805fn typescript_lang() -> Arc<Language> {
9806 Arc::new(Language::new(
9807 LanguageConfig {
9808 name: "TypeScript".into(),
9809 matcher: LanguageMatcher {
9810 path_suffixes: vec!["ts".to_string()],
9811 ..Default::default()
9812 },
9813 ..Default::default()
9814 },
9815 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9816 ))
9817}
9818
9819fn tsx_lang() -> Arc<Language> {
9820 Arc::new(Language::new(
9821 LanguageConfig {
9822 name: "tsx".into(),
9823 matcher: LanguageMatcher {
9824 path_suffixes: vec!["tsx".to_string()],
9825 ..Default::default()
9826 },
9827 ..Default::default()
9828 },
9829 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9830 ))
9831}
9832
9833fn get_all_tasks(
9834 project: &Entity<Project>,
9835 task_contexts: Arc<TaskContexts>,
9836 cx: &mut App,
9837) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9838 let new_tasks = project.update(cx, |project, cx| {
9839 project.task_store.update(cx, |task_store, cx| {
9840 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9841 this.used_and_current_resolved_tasks(task_contexts, cx)
9842 })
9843 })
9844 });
9845
9846 cx.background_spawn(async move {
9847 let (mut old, new) = new_tasks.await;
9848 old.extend(new);
9849 old
9850 })
9851}
9852
9853#[track_caller]
9854fn assert_entry_git_state(
9855 tree: &Worktree,
9856 repository: &Repository,
9857 path: &str,
9858 index_status: Option<StatusCode>,
9859 is_ignored: bool,
9860) {
9861 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9862 let entry = tree
9863 .entry_for_path(&rel_path(path))
9864 .unwrap_or_else(|| panic!("entry {path} not found"));
9865 let status = repository
9866 .status_for_path(&repo_path(path))
9867 .map(|entry| entry.status);
9868 let expected = index_status.map(|index_status| {
9869 TrackedStatus {
9870 index_status,
9871 worktree_status: StatusCode::Unmodified,
9872 }
9873 .into()
9874 });
9875 assert_eq!(
9876 status, expected,
9877 "expected {path} to have git status: {expected:?}"
9878 );
9879 assert_eq!(
9880 entry.is_ignored, is_ignored,
9881 "expected {path} to have is_ignored: {is_ignored}"
9882 );
9883}
9884
9885#[track_caller]
9886fn git_init(path: &Path) -> git2::Repository {
9887 let mut init_opts = RepositoryInitOptions::new();
9888 init_opts.initial_head("main");
9889 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9890}
9891
9892#[track_caller]
9893fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9894 let path = path.as_ref();
9895 let mut index = repo.index().expect("Failed to get index");
9896 index.add_path(path).expect("Failed to add file");
9897 index.write().expect("Failed to write index");
9898}
9899
9900#[track_caller]
9901fn git_remove_index(path: &Path, repo: &git2::Repository) {
9902 let mut index = repo.index().expect("Failed to get index");
9903 index.remove_path(path).expect("Failed to add file");
9904 index.write().expect("Failed to write index");
9905}
9906
9907#[track_caller]
9908fn git_commit(msg: &'static str, repo: &git2::Repository) {
9909 use git2::Signature;
9910
9911 let signature = Signature::now("test", "test@zed.dev").unwrap();
9912 let oid = repo.index().unwrap().write_tree().unwrap();
9913 let tree = repo.find_tree(oid).unwrap();
9914 if let Ok(head) = repo.head() {
9915 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9916
9917 let parent_commit = parent_obj.as_commit().unwrap();
9918
9919 repo.commit(
9920 Some("HEAD"),
9921 &signature,
9922 &signature,
9923 msg,
9924 &tree,
9925 &[parent_commit],
9926 )
9927 .expect("Failed to commit with parent");
9928 } else {
9929 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9930 .expect("Failed to commit");
9931 }
9932}
9933
// Compiled out: `#[cfg(any())]` is never true. Kept for ad-hoc use when
// debugging git tests locally.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9939
9940#[track_caller]
9941fn git_stash(repo: &mut git2::Repository) {
9942 use git2::Signature;
9943
9944 let signature = Signature::now("test", "test@zed.dev").unwrap();
9945 repo.stash_save(&signature, "N/A", None)
9946 .expect("Failed to stash");
9947}
9948
9949#[track_caller]
9950fn git_reset(offset: usize, repo: &git2::Repository) {
9951 let head = repo.head().expect("Couldn't get repo head");
9952 let object = head.peel(git2::ObjectType::Commit).unwrap();
9953 let commit = object.as_commit().unwrap();
9954 let new_head = commit
9955 .parents()
9956 .inspect(|parnet| {
9957 parnet.message();
9958 })
9959 .nth(offset)
9960 .expect("Not enough history");
9961 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9962 .expect("Could not reset");
9963}
9964
// Compiled out: `#[cfg(any())]` is never true. Kept for ad-hoc use when
// debugging git tests locally.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously said "Failed to commit" — a copy-paste
    // from `git_commit`; this creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9975
// Compiled out: `#[cfg(any())]` is never true. Kept for ad-hoc use when
// debugging git tests locally. Moves HEAD to `name` and checks it out.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9982
// Compiled out: `#[cfg(any())]` is never true. Kept for ad-hoc use when
// debugging git tests locally. Snapshots the repo's statuses keyed by path.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9992
/// Verifies `Project::find_project_path` resolution of absolute paths across
/// multiple worktrees, including not-yet-existing files and paths outside any
/// worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id so the assertions below
    // can check which worktree a given path resolves into.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file directly under a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested files keep their worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Paths in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even when no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}