1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use encodings::{Encoding, UTF_8};
16use fs::FakeFs;
17use futures::{StreamExt, future};
18use git::{
19 GitHostingProviderRegistry,
20 repository::{RepoPath, repo_path},
21 status::{StatusCode, TrackedStatus},
22};
23use git2::RepositoryInitOptions;
24use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
25use itertools::Itertools;
26use language::{
27 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
28 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
29 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
30 ToolchainLister,
31 language_settings::{LanguageSettingsContent, language_settings},
32 tree_sitter_rust, tree_sitter_typescript,
33};
34use lsp::{
35 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
36 Uri, WillRenameFiles, notification::DidRenameFiles,
37};
38use parking_lot::Mutex;
39use paths::{config_dir, global_gitignore_path, tasks_file};
40use postage::stream::Stream as _;
41use pretty_assertions::{assert_eq, assert_matches};
42use rand::{Rng as _, rngs::StdRng};
43use serde_json::json;
44#[cfg(not(windows))]
45use std::os;
46use std::{
47 env, mem,
48 num::NonZeroU32,
49 ops::Range,
50 str::FromStr,
51 sync::{Arc, OnceLock},
52 task::Poll,
53};
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture layout:
    // - root `.editorconfig` (root = true) configuring `*.rs` and `*.js`,
    // - `.zed/settings.json` whose values the editorconfig should override,
    // - nested `b/.editorconfig` overriding the root one for files under `b/`,
    // - files in several languages to exercise glob matching.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // The fixture is created on disk, then mirrored into a FakeFs so the
    // project machinery observes it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // blocking on language detection for the file's extension.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
// Verifies that a custom git hosting provider declared in project settings is
// registered in the global registry, and unregistered when the setting is
// removed.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Install the global registry and built-in providers before opening the
    // project, since project settings extend the global set.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a GitLab-compatible provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Let settings observers run so the provider is registered.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings file should unregister the provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
// Verifies per-directory `.zed` settings/tasks resolution: nested `b/.zed`
// overrides the worktree root's `.zed`, recently-scheduled tasks sort first,
// and global (file-based) tasks are appended last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the single worktree's (default) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks coming from the worktree root's `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: `a/a.rs` sees the root settings, while
            // `b/b.rs` sees the nested `b/.zed` override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both `.zed` directories contribute their tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most recently scheduled, and add a task from the
    // global tasks.json file on top of the worktree-local ones.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global tasks.json
    // entry comes last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
// Verifies that a task using $ZED_WORKTREE_ROOT only resolves when a worktree
// context providing that variable is available.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context (no worktree context), the task's
    // $ZED_WORKTREE_ROOT variable cannot be substituted, so nothing resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once a worktree context supplies WorktreeRoot, the task resolves and the
    // variable is expanded in the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
// Verifies that two subprojects (each rooted by a pyproject.toml) initially
// share one language server instance, and that activating a different
// toolchain for one subproject spawns a second, separate server.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walks up from `path` (at most `depth` ancestors) and returns the
        // first directory containing a `pyproject.toml`, which becomes the
        // server's rooting point.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own venv and pyproject.toml.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's pyproject.toml, not the worktree root.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating the toolchain, project-b's buffer should be served by a
    // new, separate server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree(
1217 path!("/the-root"),
1218 json!({
1219 ".gitignore": "target\n",
1220 "Cargo.lock": "",
1221 "src": {
1222 "a.rs": "",
1223 "b.rs": "",
1224 },
1225 "target": {
1226 "x": {
1227 "out": {
1228 "x.rs": ""
1229 }
1230 },
1231 "y": {
1232 "out": {
1233 "y.rs": "",
1234 }
1235 },
1236 "z": {
1237 "out": {
1238 "z.rs": ""
1239 }
1240 }
1241 }
1242 }),
1243 )
1244 .await;
1245 fs.insert_tree(
1246 path!("/the-registry"),
1247 json!({
1248 "dep1": {
1249 "src": {
1250 "dep1.rs": "",
1251 }
1252 },
1253 "dep2": {
1254 "src": {
1255 "dep2.rs": "",
1256 }
1257 },
1258 }),
1259 )
1260 .await;
1261 fs.insert_tree(
1262 path!("/the/stdlib"),
1263 json!({
1264 "LICENSE": "",
1265 "src": {
1266 "string.rs": "",
1267 }
1268 }),
1269 )
1270 .await;
1271
1272 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1273 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1274 (project.languages().clone(), project.lsp_store())
1275 });
1276 language_registry.add(rust_lang());
1277 let mut fake_servers = language_registry.register_fake_lsp(
1278 "Rust",
1279 FakeLspAdapter {
1280 name: "the-language-server",
1281 ..Default::default()
1282 },
1283 );
1284
1285 cx.executor().run_until_parked();
1286
1287 // Start the language server by opening a buffer with a compatible file extension.
1288 project
1289 .update(cx, |project, cx| {
1290 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1291 })
1292 .await
1293 .unwrap();
1294
1295 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1296 project.update(cx, |project, cx| {
1297 let worktree = project.worktrees(cx).next().unwrap();
1298 assert_eq!(
1299 worktree
1300 .read(cx)
1301 .snapshot()
1302 .entries(true, 0)
1303 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("", false),
1307 (".gitignore", false),
1308 ("Cargo.lock", false),
1309 ("src", false),
1310 ("src/a.rs", false),
1311 ("src/b.rs", false),
1312 ("target", true),
1313 ]
1314 );
1315 });
1316
1317 let prev_read_dir_count = fs.read_dir_call_count();
1318
1319 let fake_server = fake_servers.next().await.unwrap();
1320 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1321 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1322 id
1323 });
1324
1325 // Simulate jumping to a definition in a dependency outside of the worktree.
1326 let _out_of_worktree_buffer = project
1327 .update(cx, |project, cx| {
1328 project.open_local_buffer_via_lsp(
1329 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1330 server_id,
1331 cx,
1332 )
1333 })
1334 .await
1335 .unwrap();
1336
1337 // Keep track of the FS events reported to the language server.
1338 let file_changes = Arc::new(Mutex::new(Vec::new()));
1339 fake_server
1340 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1341 registrations: vec![lsp::Registration {
1342 id: Default::default(),
1343 method: "workspace/didChangeWatchedFiles".to_string(),
1344 register_options: serde_json::to_value(
1345 lsp::DidChangeWatchedFilesRegistrationOptions {
1346 watchers: vec![
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/Cargo.toml").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/src/*.{rs,c}").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the-root/target/y/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("/the/stdlib/src/**/*.rs").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 lsp::FileSystemWatcher {
1372 glob_pattern: lsp::GlobPattern::String(
1373 path!("**/Cargo.lock").to_string(),
1374 ),
1375 kind: None,
1376 },
1377 ],
1378 },
1379 )
1380 .ok(),
1381 }],
1382 })
1383 .await
1384 .into_response()
1385 .unwrap();
1386 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1387 let file_changes = file_changes.clone();
1388 move |params, _| {
1389 let mut file_changes = file_changes.lock();
1390 file_changes.extend(params.changes);
1391 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1392 }
1393 });
1394
1395 cx.executor().run_until_parked();
1396 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1397 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1398
1399 let mut new_watched_paths = fs.watched_paths();
1400 new_watched_paths.retain(|path| {
1401 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1402 });
1403 assert_eq!(
1404 &new_watched_paths,
1405 &[
1406 Path::new(path!("/the-root")),
1407 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1408 Path::new(path!("/the/stdlib/src"))
1409 ]
1410 );
1411
1412 // Now the language server has asked us to watch an ignored directory path,
1413 // so we recursively load it.
1414 project.update(cx, |project, cx| {
1415 let worktree = project.visible_worktrees(cx).next().unwrap();
1416 assert_eq!(
1417 worktree
1418 .read(cx)
1419 .snapshot()
1420 .entries(true, 0)
1421 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1422 .collect::<Vec<_>>(),
1423 &[
1424 ("", false),
1425 (".gitignore", false),
1426 ("Cargo.lock", false),
1427 ("src", false),
1428 ("src/a.rs", false),
1429 ("src/b.rs", false),
1430 ("target", true),
1431 ("target/x", true),
1432 ("target/y", true),
1433 ("target/y/out", true),
1434 ("target/y/out/y.rs", true),
1435 ("target/z", true),
1436 ]
1437 );
1438 });
1439
1440 // Perform some file system mutations, two of which match the watched patterns,
1441 // and one of which does not.
1442 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1443 .await
1444 .unwrap();
1445 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1446 .await
1447 .unwrap();
1448 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1449 .await
1450 .unwrap();
1451 fs.create_file(
1452 path!("/the-root/target/x/out/x2.rs").as_ref(),
1453 Default::default(),
1454 )
1455 .await
1456 .unwrap();
1457 fs.create_file(
1458 path!("/the-root/target/y/out/y2.rs").as_ref(),
1459 Default::default(),
1460 )
1461 .await
1462 .unwrap();
1463
1464 let encoding = Encoding::default();
1465
1466 fs.save(
1467 path!("/the-root/Cargo.lock").as_ref(),
1468 &Rope::default(),
1469 Default::default(),
1470 encoding.clone(),
1471 )
1472 .await
1473 .unwrap();
1474 fs.save(
1475 path!("/the-stdlib/LICENSE").as_ref(),
1476 &Rope::default(),
1477 Default::default(),
1478 encoding.clone(),
1479 )
1480 .await
1481 .unwrap();
1482 fs.save(
1483 path!("/the/stdlib/src/string.rs").as_ref(),
1484 &Rope::default(),
1485 Default::default(),
1486 encoding,
1487 )
1488 .await
1489 .unwrap();
1490
1491 // The language server receives events for the FS mutations that match its watch patterns.
1492 cx.executor().run_until_parked();
1493 assert_eq!(
1494 &*file_changes.lock(),
1495 &[
1496 lsp::FileEvent {
1497 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1498 typ: lsp::FileChangeType::CHANGED,
1499 },
1500 lsp::FileEvent {
1501 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1502 typ: lsp::FileChangeType::DELETED,
1503 },
1504 lsp::FileEvent {
1505 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1506 typ: lsp::FileChangeType::CREATED,
1507 },
1508 lsp::FileEvent {
1509 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1510 typ: lsp::FileChangeType::CREATED,
1511 },
1512 lsp::FileEvent {
1513 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1514 typ: lsp::FileChangeType::CHANGED,
1515 },
1516 ]
1517 );
1518}
1519
1520#[gpui::test]
1521async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1522 init_test(cx);
1523
1524 let fs = FakeFs::new(cx.executor());
1525 fs.insert_tree(
1526 path!("/dir"),
1527 json!({
1528 "a.rs": "let a = 1;",
1529 "b.rs": "let b = 2;"
1530 }),
1531 )
1532 .await;
1533
1534 let project = Project::test(
1535 fs,
1536 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1537 cx,
1538 )
1539 .await;
1540 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1541
1542 let buffer_a = project
1543 .update(cx, |project, cx| {
1544 project.open_local_buffer(path!("/dir/a.rs"), cx)
1545 })
1546 .await
1547 .unwrap();
1548 let buffer_b = project
1549 .update(cx, |project, cx| {
1550 project.open_local_buffer(path!("/dir/b.rs"), cx)
1551 })
1552 .await
1553 .unwrap();
1554
1555 lsp_store.update(cx, |lsp_store, cx| {
1556 lsp_store
1557 .update_diagnostics(
1558 LanguageServerId(0),
1559 lsp::PublishDiagnosticsParams {
1560 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1561 version: None,
1562 diagnostics: vec![lsp::Diagnostic {
1563 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1564 severity: Some(lsp::DiagnosticSeverity::ERROR),
1565 message: "error 1".to_string(),
1566 ..Default::default()
1567 }],
1568 },
1569 None,
1570 DiagnosticSourceKind::Pushed,
1571 &[],
1572 cx,
1573 )
1574 .unwrap();
1575 lsp_store
1576 .update_diagnostics(
1577 LanguageServerId(0),
1578 lsp::PublishDiagnosticsParams {
1579 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1580 version: None,
1581 diagnostics: vec![lsp::Diagnostic {
1582 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1583 severity: Some(DiagnosticSeverity::WARNING),
1584 message: "error 2".to_string(),
1585 ..Default::default()
1586 }],
1587 },
1588 None,
1589 DiagnosticSourceKind::Pushed,
1590 &[],
1591 cx,
1592 )
1593 .unwrap();
1594 });
1595
1596 buffer_a.update(cx, |buffer, _| {
1597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1598 assert_eq!(
1599 chunks
1600 .iter()
1601 .map(|(s, d)| (s.as_str(), *d))
1602 .collect::<Vec<_>>(),
1603 &[
1604 ("let ", None),
1605 ("a", Some(DiagnosticSeverity::ERROR)),
1606 (" = 1;", None),
1607 ]
1608 );
1609 });
1610 buffer_b.update(cx, |buffer, _| {
1611 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1612 assert_eq!(
1613 chunks
1614 .iter()
1615 .map(|(s, d)| (s.as_str(), *d))
1616 .collect::<Vec<_>>(),
1617 &[
1618 ("let ", None),
1619 ("b", Some(DiagnosticSeverity::WARNING)),
1620 (" = 2;", None),
1621 ]
1622 );
1623 });
1624}
1625
1626#[gpui::test]
1627async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1628 init_test(cx);
1629
1630 let fs = FakeFs::new(cx.executor());
1631 fs.insert_tree(
1632 path!("/root"),
1633 json!({
1634 "dir": {
1635 ".git": {
1636 "HEAD": "ref: refs/heads/main",
1637 },
1638 ".gitignore": "b.rs",
1639 "a.rs": "let a = 1;",
1640 "b.rs": "let b = 2;",
1641 },
1642 "other.rs": "let b = c;"
1643 }),
1644 )
1645 .await;
1646
1647 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1648 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1649 let (worktree, _) = project
1650 .update(cx, |project, cx| {
1651 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1652 })
1653 .await
1654 .unwrap();
1655 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1656
1657 let (worktree, _) = project
1658 .update(cx, |project, cx| {
1659 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1660 })
1661 .await
1662 .unwrap();
1663 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1664
1665 let server_id = LanguageServerId(0);
1666 lsp_store.update(cx, |lsp_store, cx| {
1667 lsp_store
1668 .update_diagnostics(
1669 server_id,
1670 lsp::PublishDiagnosticsParams {
1671 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1672 version: None,
1673 diagnostics: vec![lsp::Diagnostic {
1674 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1675 severity: Some(lsp::DiagnosticSeverity::ERROR),
1676 message: "unused variable 'b'".to_string(),
1677 ..Default::default()
1678 }],
1679 },
1680 None,
1681 DiagnosticSourceKind::Pushed,
1682 &[],
1683 cx,
1684 )
1685 .unwrap();
1686 lsp_store
1687 .update_diagnostics(
1688 server_id,
1689 lsp::PublishDiagnosticsParams {
1690 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1691 version: None,
1692 diagnostics: vec![lsp::Diagnostic {
1693 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1694 severity: Some(lsp::DiagnosticSeverity::ERROR),
1695 message: "unknown variable 'c'".to_string(),
1696 ..Default::default()
1697 }],
1698 },
1699 None,
1700 DiagnosticSourceKind::Pushed,
1701 &[],
1702 cx,
1703 )
1704 .unwrap();
1705 });
1706
1707 let main_ignored_buffer = project
1708 .update(cx, |project, cx| {
1709 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1710 })
1711 .await
1712 .unwrap();
1713 main_ignored_buffer.update(cx, |buffer, _| {
1714 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1715 assert_eq!(
1716 chunks
1717 .iter()
1718 .map(|(s, d)| (s.as_str(), *d))
1719 .collect::<Vec<_>>(),
1720 &[
1721 ("let ", None),
1722 ("b", Some(DiagnosticSeverity::ERROR)),
1723 (" = 2;", None),
1724 ],
1725 "Gigitnored buffers should still get in-buffer diagnostics",
1726 );
1727 });
1728 let other_buffer = project
1729 .update(cx, |project, cx| {
1730 project.open_buffer((other_worktree_id, rel_path("")), cx)
1731 })
1732 .await
1733 .unwrap();
1734 other_buffer.update(cx, |buffer, _| {
1735 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1736 assert_eq!(
1737 chunks
1738 .iter()
1739 .map(|(s, d)| (s.as_str(), *d))
1740 .collect::<Vec<_>>(),
1741 &[
1742 ("let b = ", None),
1743 ("c", Some(DiagnosticSeverity::ERROR)),
1744 (";", None),
1745 ],
1746 "Buffers from hidden projects should still get in-buffer diagnostics"
1747 );
1748 });
1749
1750 project.update(cx, |project, cx| {
1751 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1752 assert_eq!(
1753 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1754 vec![(
1755 ProjectPath {
1756 worktree_id: main_worktree_id,
1757 path: rel_path("b.rs").into(),
1758 },
1759 server_id,
1760 DiagnosticSummary {
1761 error_count: 1,
1762 warning_count: 0,
1763 }
1764 )]
1765 );
1766 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1767 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1768 });
1769}
1770
// End-to-end check of the project event stream around disk-based diagnostics:
// starting progress on the configured token emits `DiskBasedDiagnosticsStarted`,
// a publish emits `DiagnosticsUpdated`, ending progress emits
// `DiskBasedDiagnosticsFinished`, and publishing empty diagnostics twice in a
// row produces only a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so that `progress_token` marks disk-based
    // diagnostic work.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token yields the "started" event,
    // preceded by an inlay-hint refresh for the new server.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for `a.rs` surfaces a `DiagnosticsUpdated` event
    // carrying that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish must not emit another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1910
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in the "diagnostics running"
// state: the replacement server's progress lifecycle fully supersedes the old
// server's never-completed one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart shows up as a removal of server 0 followed by the addition
    // of server 1.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2014
2015#[gpui::test]
2016async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2017 init_test(cx);
2018
2019 let fs = FakeFs::new(cx.executor());
2020 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2021
2022 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2023
2024 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2025 language_registry.add(rust_lang());
2026 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2027
2028 let (buffer, _) = project
2029 .update(cx, |project, cx| {
2030 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2031 })
2032 .await
2033 .unwrap();
2034
2035 // Publish diagnostics
2036 let fake_server = fake_servers.next().await.unwrap();
2037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2038 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2039 version: None,
2040 diagnostics: vec![lsp::Diagnostic {
2041 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2042 severity: Some(lsp::DiagnosticSeverity::ERROR),
2043 message: "the message".to_string(),
2044 ..Default::default()
2045 }],
2046 });
2047
2048 cx.executor().run_until_parked();
2049 buffer.update(cx, |buffer, _| {
2050 assert_eq!(
2051 buffer
2052 .snapshot()
2053 .diagnostics_in_range::<_, usize>(0..1, false)
2054 .map(|entry| entry.diagnostic.message.clone())
2055 .collect::<Vec<_>>(),
2056 ["the message".to_string()]
2057 );
2058 });
2059 project.update(cx, |project, cx| {
2060 assert_eq!(
2061 project.diagnostic_summary(false, cx),
2062 DiagnosticSummary {
2063 error_count: 1,
2064 warning_count: 0,
2065 }
2066 );
2067 });
2068
2069 project.update(cx, |project, cx| {
2070 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2071 });
2072
2073 // The diagnostics are cleared.
2074 cx.executor().run_until_parked();
2075 buffer.update(cx, |buffer, _| {
2076 assert_eq!(
2077 buffer
2078 .snapshot()
2079 .diagnostics_in_range::<_, usize>(0..1, false)
2080 .map(|entry| entry.diagnostic.message.clone())
2081 .collect::<Vec<_>>(),
2082 Vec::<String>::new(),
2083 );
2084 });
2085 project.update(cx, |project, cx| {
2086 assert_eq!(
2087 project.diagnostic_summary(false, cx),
2088 DiagnosticSummary {
2089 error_count: 0,
2090 warning_count: 0,
2091 }
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2102
2103 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2104 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2105
2106 language_registry.add(rust_lang());
2107 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2108
2109 let (buffer, _handle) = project
2110 .update(cx, |project, cx| {
2111 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2112 })
2113 .await
2114 .unwrap();
2115
2116 // Before restarting the server, report diagnostics with an unknown buffer version.
2117 let fake_server = fake_servers.next().await.unwrap();
2118 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2119 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2120 version: Some(10000),
2121 diagnostics: Vec::new(),
2122 });
2123 cx.executor().run_until_parked();
2124 project.update(cx, |project, cx| {
2125 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2126 });
2127
2128 let mut fake_server = fake_servers.next().await.unwrap();
2129 let notification = fake_server
2130 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2131 .await
2132 .text_document;
2133 assert_eq!(notification.version, 0);
2134}
2135
2136#[gpui::test]
2137async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2138 init_test(cx);
2139
2140 let progress_token = "the-progress-token";
2141
2142 let fs = FakeFs::new(cx.executor());
2143 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2144
2145 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2146
2147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2148 language_registry.add(rust_lang());
2149 let mut fake_servers = language_registry.register_fake_lsp(
2150 "Rust",
2151 FakeLspAdapter {
2152 name: "the-language-server",
2153 disk_based_diagnostics_sources: vec!["disk".into()],
2154 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2155 ..Default::default()
2156 },
2157 );
2158
2159 let (buffer, _handle) = project
2160 .update(cx, |project, cx| {
2161 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2162 })
2163 .await
2164 .unwrap();
2165
2166 // Simulate diagnostics starting to update.
2167 let mut fake_server = fake_servers.next().await.unwrap();
2168 fake_server
2169 .start_progress_with(
2170 "another-token",
2171 lsp::WorkDoneProgressBegin {
2172 cancellable: Some(false),
2173 ..Default::default()
2174 },
2175 )
2176 .await;
2177 fake_server
2178 .start_progress_with(
2179 progress_token,
2180 lsp::WorkDoneProgressBegin {
2181 cancellable: Some(true),
2182 ..Default::default()
2183 },
2184 )
2185 .await;
2186 cx.executor().run_until_parked();
2187
2188 project.update(cx, |project, cx| {
2189 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2190 });
2191
2192 let cancel_notification = fake_server
2193 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2194 .await;
2195 assert_eq!(
2196 cancel_notification.token,
2197 NumberOrString::String(progress_token.into())
2198 );
2199}
2200
// Verifies that flipping `enable_language_server` in per-language settings
// starts and stops the corresponding language server, and that toggling one
// language leaves the other language's server untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register one fake server per language so each can be observed
    // independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer should launch the matching language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server should receive a didOpen for its own buffer only.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2318
// Verifies that diagnostics published against an *older* document version are
// transformed through the edits made since that version: ranges shift with
// insertions, overlapping diagnostics are layered correctly, and diagnostics
// arriving out of positional order are still handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Mark "disk" as a disk-based diagnostics source so the published
    // diagnostics below get `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The "\n\n" insertion shifted everything down by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Full-buffer chunking reflects the shifted diagnostic highlights.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range that starts/ends inside diagnostics clips them.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the nested error at the same start.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Within the overlap, the more severe diagnostic (ERROR) wins; the
        // remainder of the warning's range keeps the WARNING severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics come back sorted by position and translated through the
        // latest local edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2610
2611#[gpui::test]
2612async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2613 init_test(cx);
2614
2615 let text = concat!(
2616 "let one = ;\n", //
2617 "let two = \n",
2618 "let three = 3;\n",
2619 );
2620
2621 let fs = FakeFs::new(cx.executor());
2622 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2623
2624 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2625 let buffer = project
2626 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2627 .await
2628 .unwrap();
2629
2630 project.update(cx, |project, cx| {
2631 project.lsp_store.update(cx, |lsp_store, cx| {
2632 lsp_store
2633 .update_diagnostic_entries(
2634 LanguageServerId(0),
2635 PathBuf::from("/dir/a.rs"),
2636 None,
2637 None,
2638 vec![
2639 DiagnosticEntry {
2640 range: Unclipped(PointUtf16::new(0, 10))
2641 ..Unclipped(PointUtf16::new(0, 10)),
2642 diagnostic: Diagnostic {
2643 severity: DiagnosticSeverity::ERROR,
2644 message: "syntax error 1".to_string(),
2645 source_kind: DiagnosticSourceKind::Pushed,
2646 ..Diagnostic::default()
2647 },
2648 },
2649 DiagnosticEntry {
2650 range: Unclipped(PointUtf16::new(1, 10))
2651 ..Unclipped(PointUtf16::new(1, 10)),
2652 diagnostic: Diagnostic {
2653 severity: DiagnosticSeverity::ERROR,
2654 message: "syntax error 2".to_string(),
2655 source_kind: DiagnosticSourceKind::Pushed,
2656 ..Diagnostic::default()
2657 },
2658 },
2659 ],
2660 cx,
2661 )
2662 .unwrap();
2663 })
2664 });
2665
2666 // An empty range is extended forward to include the following character.
2667 // At the end of a line, an empty range is extended backward to include
2668 // the preceding character.
2669 buffer.update(cx, |buffer, _| {
2670 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2671 assert_eq!(
2672 chunks
2673 .iter()
2674 .map(|(s, d)| (s.as_str(), *d))
2675 .collect::<Vec<_>>(),
2676 &[
2677 ("let one = ", None),
2678 (";", Some(DiagnosticSeverity::ERROR)),
2679 ("\nlet two =", None),
2680 (" ", Some(DiagnosticSeverity::ERROR)),
2681 ("\nlet three = 3;\n", None)
2682 ]
2683 );
2684 });
2685}
2686
2687#[gpui::test]
2688async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2689 init_test(cx);
2690
2691 let fs = FakeFs::new(cx.executor());
2692 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2693 .await;
2694
2695 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2696 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2697
2698 lsp_store.update(cx, |lsp_store, cx| {
2699 lsp_store
2700 .update_diagnostic_entries(
2701 LanguageServerId(0),
2702 Path::new("/dir/a.rs").to_owned(),
2703 None,
2704 None,
2705 vec![DiagnosticEntry {
2706 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2707 diagnostic: Diagnostic {
2708 severity: DiagnosticSeverity::ERROR,
2709 is_primary: true,
2710 message: "syntax error a1".to_string(),
2711 source_kind: DiagnosticSourceKind::Pushed,
2712 ..Diagnostic::default()
2713 },
2714 }],
2715 cx,
2716 )
2717 .unwrap();
2718 lsp_store
2719 .update_diagnostic_entries(
2720 LanguageServerId(1),
2721 Path::new("/dir/a.rs").to_owned(),
2722 None,
2723 None,
2724 vec![DiagnosticEntry {
2725 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2726 diagnostic: Diagnostic {
2727 severity: DiagnosticSeverity::ERROR,
2728 is_primary: true,
2729 message: "syntax error b1".to_string(),
2730 source_kind: DiagnosticSourceKind::Pushed,
2731 ..Diagnostic::default()
2732 },
2733 }],
2734 cx,
2735 )
2736 .unwrap();
2737
2738 assert_eq!(
2739 lsp_store.diagnostic_summary(false, cx),
2740 DiagnosticSummary {
2741 error_count: 2,
2742 warning_count: 0,
2743 }
2744 );
2745 });
2746}
2747
// Verifies that LSP edits computed against a *past* document version are
// rebased through the local edits made since that version before being
// applied, so that both sets of changes survive.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the version the server saw at didOpen; the edits below will
    // be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve LSP edits that reference the old document version; they should
    // be rebased around the comments inserted above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves both the local comments and the
    // server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2902
// Verifies that when a server sends a huge "rewrite most of the file" set of
// edits (as rust-analyzer does for merge-imports), `edits_from_lsp` diffs the
// old and new content and produces a minimal set of edits instead.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The resolved edits collapse the giant diff down to the two real
        // changes: merging the imports and deleting the second use line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3013
3014#[gpui::test]
3015async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3016 cx: &mut gpui::TestAppContext,
3017) {
3018 init_test(cx);
3019
3020 let text = "Path()";
3021
3022 let fs = FakeFs::new(cx.executor());
3023 fs.insert_tree(
3024 path!("/dir"),
3025 json!({
3026 "a.rs": text
3027 }),
3028 )
3029 .await;
3030
3031 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3032 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3033 let buffer = project
3034 .update(cx, |project, cx| {
3035 project.open_local_buffer(path!("/dir/a.rs"), cx)
3036 })
3037 .await
3038 .unwrap();
3039
3040 // Simulate the language server sending us a pair of edits at the same location,
3041 // with an insertion following a replacement (which violates the LSP spec).
3042 let edits = lsp_store
3043 .update(cx, |lsp_store, cx| {
3044 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3045 &buffer,
3046 [
3047 lsp::TextEdit {
3048 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3049 new_text: "Path".into(),
3050 },
3051 lsp::TextEdit {
3052 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3053 new_text: "from path import Path\n\n\n".into(),
3054 },
3055 ],
3056 LanguageServerId(0),
3057 None,
3058 cx,
3059 )
3060 })
3061 .await
3062 .unwrap();
3063
3064 buffer.update(cx, |buffer, cx| {
3065 buffer.edit(edits, None, cx);
3066 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3067 });
3068}
3069
// Verifies that malformed LSP edits — inverted ranges and ranges past the end
// of the document — are normalized (reversed / clipped) and still produce a
// minimal, correct edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the resolved edits are the same two
        // minimal changes as in the well-formed adjacent-lines test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3176
3177fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3178 buffer: &Buffer,
3179 range: Range<T>,
3180) -> Vec<(String, Option<DiagnosticSeverity>)> {
3181 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3182 for chunk in buffer.snapshot().chunks(range, true) {
3183 if chunks
3184 .last()
3185 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3186 {
3187 chunks.last_mut().unwrap().0.push_str(chunk.text);
3188 } else {
3189 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3190 }
3191 }
3192 chunks
3193}
3194
// Verifies go-to-definition into a file outside the project: the target file
// is opened in a temporary invisible worktree, which is dropped again once
// the definition result is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held in an invisible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3293
3294#[gpui::test]
3295async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
3296 init_test(cx);
3297
3298 let fs = FakeFs::new(cx.executor());
3299 fs.insert_tree(
3300 path!("/dir"),
3301 json!({
3302 "a.ts": "",
3303 }),
3304 )
3305 .await;
3306
3307 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3308
3309 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3310 language_registry.add(typescript_lang());
3311 let mut fake_language_servers = language_registry.register_fake_lsp(
3312 "TypeScript",
3313 FakeLspAdapter {
3314 capabilities: lsp::ServerCapabilities {
3315 completion_provider: Some(lsp::CompletionOptions {
3316 trigger_characters: Some(vec![".".to_string()]),
3317 ..Default::default()
3318 }),
3319 ..Default::default()
3320 },
3321 ..Default::default()
3322 },
3323 );
3324
3325 let (buffer, _handle) = project
3326 .update(cx, |p, cx| {
3327 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3328 })
3329 .await
3330 .unwrap();
3331
3332 let fake_server = fake_language_servers.next().await.unwrap();
3333
3334 // When text_edit exists, it takes precedence over insert_text and label
3335 let text = "let a = obj.fqn";
3336 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3337 let completions = project.update(cx, |project, cx| {
3338 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3339 });
3340
3341 fake_server
3342 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
3343 Ok(Some(lsp::CompletionResponse::Array(vec![
3344 lsp::CompletionItem {
3345 label: "labelText".into(),
3346 insert_text: Some("insertText".into()),
3347 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
3348 range: lsp::Range::new(
3349 lsp::Position::new(0, text.len() as u32 - 3),
3350 lsp::Position::new(0, text.len() as u32),
3351 ),
3352 new_text: "textEditText".into(),
3353 })),
3354 ..Default::default()
3355 },
3356 ])))
3357 })
3358 .next()
3359 .await;
3360
3361 let completions = completions
3362 .await
3363 .unwrap()
3364 .into_iter()
3365 .flat_map(|response| response.completions)
3366 .collect::<Vec<_>>();
3367 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3368
3369 assert_eq!(completions.len(), 1);
3370 assert_eq!(completions[0].new_text, "textEditText");
3371 assert_eq!(
3372 completions[0].replace_range.to_offset(&snapshot),
3373 text.len() - 3..text.len()
3374 );
3375}
3376
#[gpui::test]
// Verifies how LSP completion items are resolved when the server supplies a
// default `edit_range` through `CompletionList.item_defaults` instead of a
// per-item `text_edit`.
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the handler is installed
        // afterwards and `.next().await` resolves once it has served one request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` should be applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label is used as the new text
        // rather than `insert_text`.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3513
#[gpui::test]
// Verifies completion resolution when the server provides neither a
// `text_edit` nor a default `edit_range`: the replaced range must be inferred
// from the word around the cursor.
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins, and the range covers the trailing word "fqn".
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label becomes the new text, replacing the word "cmp" before the quote.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3619
#[gpui::test]
// Verifies that carriage returns in a server-provided `insert_text` are
// normalized to plain newlines before the completion is applied.
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mix bare CR and CRLF to exercise both normalization paths.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" must be collapsed to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3687
#[gpui::test(iterations = 10)]
// Verifies the command-based code-action flow: the server returns an action
// with no edits, resolution attaches a command, executing the command makes
// the server send a `workspace/applyEdit` back to the editor, and the
// resulting edits end up in the project transaction.
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Actions require resolution, and the server can execute the
                // single command "_the/command".
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3829
#[gpui::test]
// Verifies that renaming an entry into a not-yet-existing directory creates
// the whole directory hierarchy, preserves file contents, and that a second
// rename into an existing directory also succeeds.
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file three directory levels deep; none of the directories exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(
                    rel_path("dir1/dir2/dir3/test.txt"),
                    None,
                    false,
                    true,
                    None,
                    cx,
                )
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the entry again — the rename assigned it a fresh location.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), None, false, true, None, cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3944
3945#[gpui::test(iterations = 10)]
3946async fn test_save_file(cx: &mut gpui::TestAppContext) {
3947 init_test(cx);
3948
3949 let fs = FakeFs::new(cx.executor());
3950 fs.insert_tree(
3951 path!("/dir"),
3952 json!({
3953 "file1": "the old contents",
3954 }),
3955 )
3956 .await;
3957
3958 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3959 let buffer = project
3960 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3961 .await
3962 .unwrap();
3963 buffer.update(cx, |buffer, cx| {
3964 assert_eq!(buffer.text(), "the old contents");
3965 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3966 });
3967
3968 project
3969 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3970 .await
3971 .unwrap();
3972
3973 let new_text = fs
3974 .load(Path::new(path!("/dir/file1")))
3975 .await
3976 .unwrap()
3977 .replace("\r\n", "\n");
3978 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3979}
3980
#[gpui::test(iterations = 10)]
// Regression test: saving an untitled buffer under a path with a recognized
// language extension must start the matching language server and open the
// document in it.
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an in-memory buffer with no file; no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file, which should trigger server startup.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4060
4061#[gpui::test(iterations = 30)]
4062async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4063 init_test(cx);
4064
4065 let fs = FakeFs::new(cx.executor());
4066 fs.insert_tree(
4067 path!("/dir"),
4068 json!({
4069 "file1": "the original contents",
4070 }),
4071 )
4072 .await;
4073
4074 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4075 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4076 let buffer = project
4077 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4078 .await
4079 .unwrap();
4080
4081 // Simulate buffer diffs being slow, so that they don't complete before
4082 // the next file change occurs.
4083 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4084
4085 let encoding = Encoding::default();
4086
4087 // Change the buffer's file on disk, and then wait for the file change
4088 // to be detected by the worktree, so that the buffer starts reloading.
4089 fs.save(
4090 path!("/dir/file1").as_ref(),
4091 &Rope::from_str("the first contents", cx.background_executor()),
4092 Default::default(),
4093 encoding.clone(),
4094 )
4095 .await
4096 .unwrap();
4097 worktree.next_event(cx).await;
4098
4099 // Change the buffer's file again. Depending on the random seed, the
4100 // previous file change may still be in progress.
4101 fs.save(
4102 path!("/dir/file1").as_ref(),
4103 &Rope::from_str("the second contents", cx.background_executor()),
4104 Default::default(),
4105 encoding,
4106 )
4107 .await
4108 .unwrap();
4109 worktree.next_event(cx).await;
4110
4111 cx.executor().run_until_parked();
4112 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4113 buffer.read_with(cx, |buffer, _| {
4114 assert_eq!(buffer.text(), on_disk_text);
4115 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4116 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4117 });
4118}
4119
4120#[gpui::test(iterations = 30)]
4121async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4122 init_test(cx);
4123
4124 let fs = FakeFs::new(cx.executor());
4125 fs.insert_tree(
4126 path!("/dir"),
4127 json!({
4128 "file1": "the original contents",
4129 }),
4130 )
4131 .await;
4132
4133 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4134 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4135 let buffer = project
4136 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4137 .await
4138 .unwrap();
4139
4140 // Simulate buffer diffs being slow, so that they don't complete before
4141 // the next file change occurs.
4142 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4143
4144 let encoding = Encoding::new(UTF_8);
4145
4146 // Change the buffer's file on disk, and then wait for the file change
4147 // to be detected by the worktree, so that the buffer starts reloading.
4148 fs.save(
4149 path!("/dir/file1").as_ref(),
4150 &Rope::from_str("the first contents", cx.background_executor()),
4151 Default::default(),
4152 encoding,
4153 )
4154 .await
4155 .unwrap();
4156 worktree.next_event(cx).await;
4157
4158 cx.executor()
4159 .spawn(cx.executor().simulate_random_delay())
4160 .await;
4161
4162 // Perform a noop edit, causing the buffer's version to increase.
4163 buffer.update(cx, |buffer, cx| {
4164 buffer.edit([(0..0, " ")], None, cx);
4165 buffer.undo(cx);
4166 });
4167
4168 cx.executor().run_until_parked();
4169 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4170 buffer.read_with(cx, |buffer, _| {
4171 let buffer_text = buffer.text();
4172 if buffer_text == on_disk_text {
4173 assert!(
4174 !buffer.is_dirty() && !buffer.has_conflict(),
4175 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4176 );
4177 }
4178 // If the file change occurred while the buffer was processing the first
4179 // change, the buffer will be in a conflicting state.
4180 else {
4181 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4182 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4183 }
4184 });
4185}
4186
4187#[gpui::test]
4188async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4189 init_test(cx);
4190
4191 let fs = FakeFs::new(cx.executor());
4192 fs.insert_tree(
4193 path!("/dir"),
4194 json!({
4195 "file1": "the old contents",
4196 }),
4197 )
4198 .await;
4199
4200 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4201 let buffer = project
4202 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4203 .await
4204 .unwrap();
4205 buffer.update(cx, |buffer, cx| {
4206 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4207 });
4208
4209 project
4210 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4211 .await
4212 .unwrap();
4213
4214 let new_text = fs
4215 .load(Path::new(path!("/dir/file1")))
4216 .await
4217 .unwrap()
4218 .replace("\r\n", "\n");
4219 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4220}
4221
#[gpui::test]
// Verifies "save as": a scratch buffer saved under a new path picks up the
// file association, clears its dirty state, gets the language matching the
// new extension, and is deduplicated on subsequent opens.
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After saving, the buffer is associated with the new path, clean, and
    // re-detected as Rust based on the ".rs" extension.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4275
#[gpui::test]
// Verifies that "save as" over an already-open buffer re-associates the
// buffer with the new path while the original file on disk stays untouched.
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Change the trailing "a" to "b" so the saved copy differs from the original.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
4342
#[gpui::test(retries = 5)]
// Uses a real filesystem to rename/delete files under open buffers, then
// checks that (1) entry ids are stable across renames, (2) buffers track
// their files' new paths and disk state, and (3) a remote worktree replica
// converges to the same paths after applying the streamed updates.
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS operations below require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Keep buffers open so their files are tracked through the renames below.
    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the parent-directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Each buffer's file should now point at its post-rename path; the
        // deleted file keeps its old path but reports a Deleted disk state.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4510
4511#[gpui::test(iterations = 10)]
4512async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4513 init_test(cx);
4514
4515 let fs = FakeFs::new(cx.executor());
4516 fs.insert_tree(
4517 path!("/dir"),
4518 json!({
4519 "a": {
4520 "file1": "",
4521 }
4522 }),
4523 )
4524 .await;
4525
4526 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4527 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4528 let tree_id = tree.update(cx, |tree, _| tree.id());
4529
4530 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4531 project.update(cx, |project, cx| {
4532 let tree = project.worktrees(cx).next().unwrap();
4533 tree.read(cx)
4534 .entry_for_path(rel_path(path))
4535 .unwrap_or_else(|| panic!("no entry for path {}", path))
4536 .id
4537 })
4538 };
4539
4540 let dir_id = id_for_path("a", cx);
4541 let file_id = id_for_path("a/file1", cx);
4542 let buffer = project
4543 .update(cx, |p, cx| {
4544 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4545 })
4546 .await
4547 .unwrap();
4548 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4549
4550 project
4551 .update(cx, |project, cx| {
4552 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4553 })
4554 .unwrap()
4555 .await
4556 .into_included()
4557 .unwrap();
4558 cx.executor().run_until_parked();
4559
4560 assert_eq!(id_for_path("b", cx), dir_id);
4561 assert_eq!(id_for_path("b/file1", cx), file_id);
4562 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4563}
4564
4565#[gpui::test]
4566async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4567 init_test(cx);
4568
4569 let fs = FakeFs::new(cx.executor());
4570 fs.insert_tree(
4571 "/dir",
4572 json!({
4573 "a.txt": "a-contents",
4574 "b.txt": "b-contents",
4575 }),
4576 )
4577 .await;
4578
4579 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4580
4581 // Spawn multiple tasks to open paths, repeating some paths.
4582 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4583 (
4584 p.open_local_buffer("/dir/a.txt", cx),
4585 p.open_local_buffer("/dir/b.txt", cx),
4586 p.open_local_buffer("/dir/a.txt", cx),
4587 )
4588 });
4589
4590 let buffer_a_1 = buffer_a_1.await.unwrap();
4591 let buffer_a_2 = buffer_a_2.await.unwrap();
4592 let buffer_b = buffer_b.await.unwrap();
4593 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4594 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4595
4596 // There is only one buffer per path.
4597 let buffer_a_id = buffer_a_1.entity_id();
4598 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4599
4600 // Open the same path again while it is still open.
4601 drop(buffer_a_1);
4602 let buffer_a_3 = project
4603 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4604 .await
4605 .unwrap();
4606
4607 // There's still only one buffer per path.
4608 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4609}
4610
4611#[gpui::test]
4612async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4613 init_test(cx);
4614
4615 let fs = FakeFs::new(cx.executor());
4616 fs.insert_tree(
4617 path!("/dir"),
4618 json!({
4619 "file1": "abc",
4620 "file2": "def",
4621 "file3": "ghi",
4622 }),
4623 )
4624 .await;
4625
4626 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4627
4628 let buffer1 = project
4629 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4630 .await
4631 .unwrap();
4632 let events = Arc::new(Mutex::new(Vec::new()));
4633
4634 // initially, the buffer isn't dirty.
4635 buffer1.update(cx, |buffer, cx| {
4636 cx.subscribe(&buffer1, {
4637 let events = events.clone();
4638 move |_, _, event, _| match event {
4639 BufferEvent::Operation { .. } => {}
4640 _ => events.lock().push(event.clone()),
4641 }
4642 })
4643 .detach();
4644
4645 assert!(!buffer.is_dirty());
4646 assert!(events.lock().is_empty());
4647
4648 buffer.edit([(1..2, "")], None, cx);
4649 });
4650
4651 // after the first edit, the buffer is dirty, and emits a dirtied event.
4652 buffer1.update(cx, |buffer, cx| {
4653 assert!(buffer.text() == "ac");
4654 assert!(buffer.is_dirty());
4655 assert_eq!(
4656 *events.lock(),
4657 &[
4658 language::BufferEvent::Edited,
4659 language::BufferEvent::DirtyChanged
4660 ]
4661 );
4662 events.lock().clear();
4663 buffer.did_save(
4664 buffer.version(),
4665 buffer.file().unwrap().disk_state().mtime(),
4666 cx,
4667 );
4668 });
4669
4670 // after saving, the buffer is not dirty, and emits a saved event.
4671 buffer1.update(cx, |buffer, cx| {
4672 assert!(!buffer.is_dirty());
4673 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4674 events.lock().clear();
4675
4676 buffer.edit([(1..1, "B")], None, cx);
4677 buffer.edit([(2..2, "D")], None, cx);
4678 });
4679
4680 // after editing again, the buffer is dirty, and emits another dirty event.
4681 buffer1.update(cx, |buffer, cx| {
4682 assert!(buffer.text() == "aBDc");
4683 assert!(buffer.is_dirty());
4684 assert_eq!(
4685 *events.lock(),
4686 &[
4687 language::BufferEvent::Edited,
4688 language::BufferEvent::DirtyChanged,
4689 language::BufferEvent::Edited,
4690 ],
4691 );
4692 events.lock().clear();
4693
4694 // After restoring the buffer to its previously-saved state,
4695 // the buffer is not considered dirty anymore.
4696 buffer.edit([(1..3, "")], None, cx);
4697 assert!(buffer.text() == "ac");
4698 assert!(!buffer.is_dirty());
4699 });
4700
4701 assert_eq!(
4702 *events.lock(),
4703 &[
4704 language::BufferEvent::Edited,
4705 language::BufferEvent::DirtyChanged
4706 ]
4707 );
4708
4709 // When a file is deleted, it is not considered dirty.
4710 let events = Arc::new(Mutex::new(Vec::new()));
4711 let buffer2 = project
4712 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4713 .await
4714 .unwrap();
4715 buffer2.update(cx, |_, cx| {
4716 cx.subscribe(&buffer2, {
4717 let events = events.clone();
4718 move |_, _, event, _| match event {
4719 BufferEvent::Operation { .. } => {}
4720 _ => events.lock().push(event.clone()),
4721 }
4722 })
4723 .detach();
4724 });
4725
4726 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4727 .await
4728 .unwrap();
4729 cx.executor().run_until_parked();
4730 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4731 assert_eq!(
4732 mem::take(&mut *events.lock()),
4733 &[language::BufferEvent::FileHandleChanged]
4734 );
4735
4736 // Buffer becomes dirty when edited.
4737 buffer2.update(cx, |buffer, cx| {
4738 buffer.edit([(2..3, "")], None, cx);
4739 assert_eq!(buffer.is_dirty(), true);
4740 });
4741 assert_eq!(
4742 mem::take(&mut *events.lock()),
4743 &[
4744 language::BufferEvent::Edited,
4745 language::BufferEvent::DirtyChanged
4746 ]
4747 );
4748
4749 // Buffer becomes clean again when all of its content is removed, because
4750 // the file was deleted.
4751 buffer2.update(cx, |buffer, cx| {
4752 buffer.edit([(0..2, "")], None, cx);
4753 assert_eq!(buffer.is_empty(), true);
4754 assert_eq!(buffer.is_dirty(), false);
4755 });
4756 assert_eq!(
4757 *events.lock(),
4758 &[
4759 language::BufferEvent::Edited,
4760 language::BufferEvent::DirtyChanged
4761 ]
4762 );
4763
4764 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4765 let events = Arc::new(Mutex::new(Vec::new()));
4766 let buffer3 = project
4767 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4768 .await
4769 .unwrap();
4770 buffer3.update(cx, |_, cx| {
4771 cx.subscribe(&buffer3, {
4772 let events = events.clone();
4773 move |_, _, event, _| match event {
4774 BufferEvent::Operation { .. } => {}
4775 _ => events.lock().push(event.clone()),
4776 }
4777 })
4778 .detach();
4779 });
4780
4781 buffer3.update(cx, |buffer, cx| {
4782 buffer.edit([(0..0, "x")], None, cx);
4783 });
4784 events.lock().clear();
4785 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4786 .await
4787 .unwrap();
4788 cx.executor().run_until_parked();
4789 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4790 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4791}
4792
4793#[gpui::test]
4794async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4795 init_test(cx);
4796
4797 let (initial_contents, initial_offsets) =
4798 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
4799 let fs = FakeFs::new(cx.executor());
4800 fs.insert_tree(
4801 path!("/dir"),
4802 json!({
4803 "the-file": initial_contents,
4804 }),
4805 )
4806 .await;
4807 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4808 let buffer = project
4809 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
4810 .await
4811 .unwrap();
4812
4813 let anchors = initial_offsets
4814 .iter()
4815 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
4816 .collect::<Vec<_>>();
4817
4818 // Change the file on disk, adding two new lines of text, and removing
4819 // one line.
4820 buffer.update(cx, |buffer, _| {
4821 assert!(!buffer.is_dirty());
4822 assert!(!buffer.has_conflict());
4823 });
4824
4825 let (new_contents, new_offsets) =
4826 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
4827
4828 let encoding = Encoding::new(UTF_8);
4829
4830 fs.save(
4831 path!("/dir/the-file").as_ref(),
4832 &Rope::from_str(new_contents.as_str(), cx.background_executor()),
4833 LineEnding::Unix,
4834 encoding,
4835 )
4836 .await
4837 .unwrap();
4838
4839 // Because the buffer was not modified, it is reloaded from disk. Its
4840 // contents are edited according to the diff between the old and new
4841 // file contents.
4842 cx.executor().run_until_parked();
4843 buffer.update(cx, |buffer, _| {
4844 assert_eq!(buffer.text(), new_contents);
4845 assert!(!buffer.is_dirty());
4846 assert!(!buffer.has_conflict());
4847
4848 let anchor_offsets = anchors
4849 .iter()
4850 .map(|anchor| anchor.to_offset(&*buffer))
4851 .collect::<Vec<_>>();
4852 assert_eq!(anchor_offsets, new_offsets);
4853 });
4854
4855 // Modify the buffer
4856 buffer.update(cx, |buffer, cx| {
4857 buffer.edit([(0..0, " ")], None, cx);
4858 assert!(buffer.is_dirty());
4859 assert!(!buffer.has_conflict());
4860 });
4861
4862 let encoding = Encoding::new(UTF_8);
4863
4864 // Change the file on disk again, adding blank lines to the beginning.
4865 fs.save(
4866 path!("/dir/the-file").as_ref(),
4867 &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
4868 LineEnding::Unix,
4869 encoding,
4870 )
4871 .await
4872 .unwrap();
4873
4874 // Because the buffer is modified, it doesn't reload from disk, but is
4875 // marked as having a conflict.
4876 cx.executor().run_until_parked();
4877 buffer.update(cx, |buffer, _| {
4878 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
4879 assert!(buffer.has_conflict());
4880 });
4881}
4882
4883#[gpui::test]
4884async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4885 init_test(cx);
4886
4887 let fs = FakeFs::new(cx.executor());
4888 fs.insert_tree(
4889 path!("/dir"),
4890 json!({
4891 "file1": "a\nb\nc\n",
4892 "file2": "one\r\ntwo\r\nthree\r\n",
4893 }),
4894 )
4895 .await;
4896
4897 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4898 let buffer1 = project
4899 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4900 .await
4901 .unwrap();
4902 let buffer2 = project
4903 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4904 .await
4905 .unwrap();
4906
4907 buffer1.update(cx, |buffer, _| {
4908 assert_eq!(buffer.text(), "a\nb\nc\n");
4909 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4910 });
4911 buffer2.update(cx, |buffer, _| {
4912 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4913 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4914 });
4915
4916 let encoding = Encoding::new(UTF_8);
4917
4918 // Change a file's line endings on disk from unix to windows. The buffer's
4919 // state updates correctly.
4920 fs.save(
4921 path!("/dir/file1").as_ref(),
4922 &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
4923 LineEnding::Windows,
4924 encoding,
4925 )
4926 .await
4927 .unwrap();
4928 cx.executor().run_until_parked();
4929 buffer1.update(cx, |buffer, _| {
4930 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4931 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4932 });
4933
4934 // Save a file with windows line endings. The file is written correctly.
4935 buffer2.update(cx, |buffer, cx| {
4936 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4937 });
4938 project
4939 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4940 .await
4941 .unwrap();
4942 assert_eq!(
4943 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4944 "one\r\ntwo\r\nthree\r\nfour\r\n",
4945 );
4946}
4947
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics whose `related_information` entries
    // reference each other are assembled into groups: each group has one
    // primary diagnostic plus its associated hints, sharing a `group_id`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics message containing two logical groups:
    // - "error 1" (warning) with one hint, both at the same range;
    // - "error 2" (error) with two hints at a different range.
    // Primaries and hints cross-reference each other via related information.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2", referencing both of its hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: "error 2" and its hints share
    // group 0; "error 1" and its hint share group 1. Only the original
    // (non-hint) diagnostic of each group is marked `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 alone: the two "error 2" hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 alone: the "error 1" warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5207
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP `willRenameFiles`
    // request and the `didRenameFiles` notification to a server that has
    // registered file-operation capabilities matching the renamed path.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server declares interest in rename operations on `.rs` files
    // and on all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the request handler for `willRenameFiles` is
    // installed below, before this future is awaited.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will return from `willRenameFiles`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server actually handed back, so we can assert it
    // was resolved by the project.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The server is told about the old and new URIs before the
                    // rename takes effect.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives `didRenameFiles` with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5343
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename end-to-end against a fake LSP server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file workspace edit.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename support with prepare-rename enabled.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both affected buffers, and the edits
    // were applied to each.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5483
5484#[gpui::test]
5485async fn test_search(cx: &mut gpui::TestAppContext) {
5486 init_test(cx);
5487
5488 let fs = FakeFs::new(cx.executor());
5489 fs.insert_tree(
5490 path!("/dir"),
5491 json!({
5492 "one.rs": "const ONE: usize = 1;",
5493 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5494 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5495 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5496 }),
5497 )
5498 .await;
5499 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5500 assert_eq!(
5501 search(
5502 &project,
5503 SearchQuery::text(
5504 "TWO",
5505 false,
5506 true,
5507 false,
5508 Default::default(),
5509 Default::default(),
5510 false,
5511 None
5512 )
5513 .unwrap(),
5514 cx
5515 )
5516 .await
5517 .unwrap(),
5518 HashMap::from_iter([
5519 (path!("dir/two.rs").to_string(), vec![6..9]),
5520 (path!("dir/three.rs").to_string(), vec![37..40])
5521 ])
5522 );
5523
5524 let buffer_4 = project
5525 .update(cx, |project, cx| {
5526 project.open_local_buffer(path!("/dir/four.rs"), cx)
5527 })
5528 .await
5529 .unwrap();
5530 buffer_4.update(cx, |buffer, cx| {
5531 let text = "two::TWO";
5532 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5533 });
5534
5535 assert_eq!(
5536 search(
5537 &project,
5538 SearchQuery::text(
5539 "TWO",
5540 false,
5541 true,
5542 false,
5543 Default::default(),
5544 Default::default(),
5545 false,
5546 None,
5547 )
5548 .unwrap(),
5549 cx
5550 )
5551 .await
5552 .unwrap(),
5553 HashMap::from_iter([
5554 (path!("dir/two.rs").to_string(), vec![6..9]),
5555 (path!("dir/three.rs").to_string(), vec![37..40]),
5556 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5557 ])
5558 );
5559}
5560
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that the inclusion `PathMatcher` of a search query restricts
    // results to matching files, and that non-matching inclusion globs are
    // simply ignored when combined with matching ones.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single glob matching only the Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob combined with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs plus a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5684
5685#[gpui::test]
5686async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5687 init_test(cx);
5688
5689 let search_query = "file";
5690
5691 let fs = FakeFs::new(cx.executor());
5692 fs.insert_tree(
5693 path!("/dir"),
5694 json!({
5695 "one.rs": r#"// Rust file one"#,
5696 "one.ts": r#"// TypeScript file one"#,
5697 "two.rs": r#"// Rust file two"#,
5698 "two.ts": r#"// TypeScript file two"#,
5699 }),
5700 )
5701 .await;
5702 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5703
5704 assert_eq!(
5705 search(
5706 &project,
5707 SearchQuery::text(
5708 search_query,
5709 false,
5710 true,
5711 false,
5712 Default::default(),
5713 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5714 false,
5715 None,
5716 )
5717 .unwrap(),
5718 cx
5719 )
5720 .await
5721 .unwrap(),
5722 HashMap::from_iter([
5723 (path!("dir/one.rs").to_string(), vec![8..12]),
5724 (path!("dir/one.ts").to_string(), vec![14..18]),
5725 (path!("dir/two.rs").to_string(), vec![8..12]),
5726 (path!("dir/two.ts").to_string(), vec![14..18]),
5727 ]),
5728 "If no exclusions match, all files should be returned"
5729 );
5730
5731 assert_eq!(
5732 search(
5733 &project,
5734 SearchQuery::text(
5735 search_query,
5736 false,
5737 true,
5738 false,
5739 Default::default(),
5740 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5741 false,
5742 None,
5743 )
5744 .unwrap(),
5745 cx
5746 )
5747 .await
5748 .unwrap(),
5749 HashMap::from_iter([
5750 (path!("dir/one.ts").to_string(), vec![14..18]),
5751 (path!("dir/two.ts").to_string(), vec![14..18]),
5752 ]),
5753 "Rust exclusion search should give only TypeScript files"
5754 );
5755
5756 assert_eq!(
5757 search(
5758 &project,
5759 SearchQuery::text(
5760 search_query,
5761 false,
5762 true,
5763 false,
5764 Default::default(),
5765 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5766 .unwrap(),
5767 false,
5768 None,
5769 )
5770 .unwrap(),
5771 cx
5772 )
5773 .await
5774 .unwrap(),
5775 HashMap::from_iter([
5776 (path!("dir/one.rs").to_string(), vec![8..12]),
5777 (path!("dir/two.rs").to_string(), vec![8..12]),
5778 ]),
5779 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5780 );
5781
5782 assert!(
5783 search(
5784 &project,
5785 SearchQuery::text(
5786 search_query,
5787 false,
5788 true,
5789 false,
5790 Default::default(),
5791 PathMatcher::new(
5792 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5793 PathStyle::local(),
5794 )
5795 .unwrap(),
5796 false,
5797 None,
5798 )
5799 .unwrap(),
5800 cx
5801 )
5802 .await
5803 .unwrap()
5804 .is_empty(),
5805 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5806 );
5807}
5808
5809#[gpui::test]
5810async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5811 init_test(cx);
5812
5813 let search_query = "file";
5814
5815 let fs = FakeFs::new(cx.executor());
5816 fs.insert_tree(
5817 path!("/dir"),
5818 json!({
5819 "one.rs": r#"// Rust file one"#,
5820 "one.ts": r#"// TypeScript file one"#,
5821 "two.rs": r#"// Rust file two"#,
5822 "two.ts": r#"// TypeScript file two"#,
5823 }),
5824 )
5825 .await;
5826
5827 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5828 let path_style = PathStyle::local();
5829 let _buffer = project.update(cx, |project, cx| {
5830 project.create_local_buffer("file", None, false, cx)
5831 });
5832
5833 assert_eq!(
5834 search(
5835 &project,
5836 SearchQuery::text(
5837 search_query,
5838 false,
5839 true,
5840 false,
5841 Default::default(),
5842 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5843 false,
5844 None,
5845 )
5846 .unwrap(),
5847 cx
5848 )
5849 .await
5850 .unwrap(),
5851 HashMap::from_iter([
5852 (path!("dir/one.rs").to_string(), vec![8..12]),
5853 (path!("dir/one.ts").to_string(), vec![14..18]),
5854 (path!("dir/two.rs").to_string(), vec![8..12]),
5855 (path!("dir/two.ts").to_string(), vec![14..18]),
5856 ]),
5857 "If no exclusions match, all files should be returned"
5858 );
5859
5860 assert_eq!(
5861 search(
5862 &project,
5863 SearchQuery::text(
5864 search_query,
5865 false,
5866 true,
5867 false,
5868 Default::default(),
5869 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5870 false,
5871 None,
5872 )
5873 .unwrap(),
5874 cx
5875 )
5876 .await
5877 .unwrap(),
5878 HashMap::from_iter([
5879 (path!("dir/one.ts").to_string(), vec![14..18]),
5880 (path!("dir/two.ts").to_string(), vec![14..18]),
5881 ]),
5882 "Rust exclusion search should give only TypeScript files"
5883 );
5884
5885 assert_eq!(
5886 search(
5887 &project,
5888 SearchQuery::text(
5889 search_query,
5890 false,
5891 true,
5892 false,
5893 Default::default(),
5894 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5895 false,
5896 None,
5897 )
5898 .unwrap(),
5899 cx
5900 )
5901 .await
5902 .unwrap(),
5903 HashMap::from_iter([
5904 (path!("dir/one.rs").to_string(), vec![8..12]),
5905 (path!("dir/two.rs").to_string(), vec![8..12]),
5906 ]),
5907 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5908 );
5909
5910 assert!(
5911 search(
5912 &project,
5913 SearchQuery::text(
5914 search_query,
5915 false,
5916 true,
5917 false,
5918 Default::default(),
5919 PathMatcher::new(
5920 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5921 PathStyle::local(),
5922 )
5923 .unwrap(),
5924 false,
5925 None,
5926 )
5927 .unwrap(),
5928 cx
5929 )
5930 .await
5931 .unwrap()
5932 .is_empty(),
5933 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5934 );
5935}
5936
5937#[gpui::test]
5938async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5939 init_test(cx);
5940
5941 let search_query = "file";
5942
5943 let fs = FakeFs::new(cx.executor());
5944 fs.insert_tree(
5945 path!("/dir"),
5946 json!({
5947 "one.rs": r#"// Rust file one"#,
5948 "one.ts": r#"// TypeScript file one"#,
5949 "two.rs": r#"// Rust file two"#,
5950 "two.ts": r#"// TypeScript file two"#,
5951 }),
5952 )
5953 .await;
5954 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5955 assert!(
5956 search(
5957 &project,
5958 SearchQuery::text(
5959 search_query,
5960 false,
5961 true,
5962 false,
5963 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5964 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5965 false,
5966 None,
5967 )
5968 .unwrap(),
5969 cx
5970 )
5971 .await
5972 .unwrap()
5973 .is_empty(),
5974 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5975 );
5976
5977 assert!(
5978 search(
5979 &project,
5980 SearchQuery::text(
5981 search_query,
5982 false,
5983 true,
5984 false,
5985 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5986 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5987 false,
5988 None,
5989 )
5990 .unwrap(),
5991 cx
5992 )
5993 .await
5994 .unwrap()
5995 .is_empty(),
5996 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5997 );
5998
5999 assert!(
6000 search(
6001 &project,
6002 SearchQuery::text(
6003 search_query,
6004 false,
6005 true,
6006 false,
6007 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6008 .unwrap(),
6009 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6010 .unwrap(),
6011 false,
6012 None,
6013 )
6014 .unwrap(),
6015 cx
6016 )
6017 .await
6018 .unwrap()
6019 .is_empty(),
6020 "Non-matching inclusions and exclusions should not change that."
6021 );
6022
6023 assert_eq!(
6024 search(
6025 &project,
6026 SearchQuery::text(
6027 search_query,
6028 false,
6029 true,
6030 false,
6031 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6032 .unwrap(),
6033 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6034 .unwrap(),
6035 false,
6036 None,
6037 )
6038 .unwrap(),
6039 cx
6040 )
6041 .await
6042 .unwrap(),
6043 HashMap::from_iter([
6044 (path!("dir/one.ts").to_string(), vec![14..18]),
6045 (path!("dir/two.ts").to_string(), vec![14..18]),
6046 ]),
6047 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6048 );
6049}
6050
6051#[gpui::test]
6052async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6053 init_test(cx);
6054
6055 let fs = FakeFs::new(cx.executor());
6056 fs.insert_tree(
6057 path!("/worktree-a"),
6058 json!({
6059 "haystack.rs": r#"// NEEDLE"#,
6060 "haystack.ts": r#"// NEEDLE"#,
6061 }),
6062 )
6063 .await;
6064 fs.insert_tree(
6065 path!("/worktree-b"),
6066 json!({
6067 "haystack.rs": r#"// NEEDLE"#,
6068 "haystack.ts": r#"// NEEDLE"#,
6069 }),
6070 )
6071 .await;
6072
6073 let path_style = PathStyle::local();
6074 let project = Project::test(
6075 fs.clone(),
6076 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6077 cx,
6078 )
6079 .await;
6080
6081 assert_eq!(
6082 search(
6083 &project,
6084 SearchQuery::text(
6085 "NEEDLE",
6086 false,
6087 true,
6088 false,
6089 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6090 Default::default(),
6091 true,
6092 None,
6093 )
6094 .unwrap(),
6095 cx
6096 )
6097 .await
6098 .unwrap(),
6099 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6100 "should only return results from included worktree"
6101 );
6102 assert_eq!(
6103 search(
6104 &project,
6105 SearchQuery::text(
6106 "NEEDLE",
6107 false,
6108 true,
6109 false,
6110 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6111 Default::default(),
6112 true,
6113 None,
6114 )
6115 .unwrap(),
6116 cx
6117 )
6118 .await
6119 .unwrap(),
6120 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6121 "should only return results from included worktree"
6122 );
6123
6124 assert_eq!(
6125 search(
6126 &project,
6127 SearchQuery::text(
6128 "NEEDLE",
6129 false,
6130 true,
6131 false,
6132 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6133 Default::default(),
6134 false,
6135 None,
6136 )
6137 .unwrap(),
6138 cx
6139 )
6140 .await
6141 .unwrap(),
6142 HashMap::from_iter([
6143 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6144 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6145 ]),
6146 "should return results from both worktrees"
6147 );
6148}
6149
6150#[gpui::test]
6151async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6152 init_test(cx);
6153
6154 let fs = FakeFs::new(cx.background_executor.clone());
6155 fs.insert_tree(
6156 path!("/dir"),
6157 json!({
6158 ".git": {},
6159 ".gitignore": "**/target\n/node_modules\n",
6160 "target": {
6161 "index.txt": "index_key:index_value"
6162 },
6163 "node_modules": {
6164 "eslint": {
6165 "index.ts": "const eslint_key = 'eslint value'",
6166 "package.json": r#"{ "some_key": "some value" }"#,
6167 },
6168 "prettier": {
6169 "index.ts": "const prettier_key = 'prettier value'",
6170 "package.json": r#"{ "other_key": "other value" }"#,
6171 },
6172 },
6173 "package.json": r#"{ "main_key": "main value" }"#,
6174 }),
6175 )
6176 .await;
6177 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6178
6179 let query = "key";
6180 assert_eq!(
6181 search(
6182 &project,
6183 SearchQuery::text(
6184 query,
6185 false,
6186 false,
6187 false,
6188 Default::default(),
6189 Default::default(),
6190 false,
6191 None,
6192 )
6193 .unwrap(),
6194 cx
6195 )
6196 .await
6197 .unwrap(),
6198 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6199 "Only one non-ignored file should have the query"
6200 );
6201
6202 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6203 let path_style = PathStyle::local();
6204 assert_eq!(
6205 search(
6206 &project,
6207 SearchQuery::text(
6208 query,
6209 false,
6210 false,
6211 true,
6212 Default::default(),
6213 Default::default(),
6214 false,
6215 None,
6216 )
6217 .unwrap(),
6218 cx
6219 )
6220 .await
6221 .unwrap(),
6222 HashMap::from_iter([
6223 (path!("dir/package.json").to_string(), vec![8..11]),
6224 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6225 (
6226 path!("dir/node_modules/prettier/package.json").to_string(),
6227 vec![9..12]
6228 ),
6229 (
6230 path!("dir/node_modules/prettier/index.ts").to_string(),
6231 vec![15..18]
6232 ),
6233 (
6234 path!("dir/node_modules/eslint/index.ts").to_string(),
6235 vec![13..16]
6236 ),
6237 (
6238 path!("dir/node_modules/eslint/package.json").to_string(),
6239 vec![8..11]
6240 ),
6241 ]),
6242 "Unrestricted search with ignored directories should find every file with the query"
6243 );
6244
6245 let files_to_include =
6246 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6247 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6248 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6249 assert_eq!(
6250 search(
6251 &project,
6252 SearchQuery::text(
6253 query,
6254 false,
6255 false,
6256 true,
6257 files_to_include,
6258 files_to_exclude,
6259 false,
6260 None,
6261 )
6262 .unwrap(),
6263 cx
6264 )
6265 .await
6266 .unwrap(),
6267 HashMap::from_iter([(
6268 path!("dir/node_modules/prettier/package.json").to_string(),
6269 vec![9..12]
6270 )]),
6271 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6272 );
6273}
6274
6275#[gpui::test]
6276async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6277 init_test(cx);
6278
6279 let fs = FakeFs::new(cx.executor());
6280 fs.insert_tree(
6281 path!("/dir"),
6282 json!({
6283 "one.rs": "// ПРИВЕТ? привет!",
6284 "two.rs": "// ПРИВЕТ.",
6285 "three.rs": "// привет",
6286 }),
6287 )
6288 .await;
6289 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6290 let unicode_case_sensitive_query = SearchQuery::text(
6291 "привет",
6292 false,
6293 true,
6294 false,
6295 Default::default(),
6296 Default::default(),
6297 false,
6298 None,
6299 );
6300 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6301 assert_eq!(
6302 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6303 .await
6304 .unwrap(),
6305 HashMap::from_iter([
6306 (path!("dir/one.rs").to_string(), vec![17..29]),
6307 (path!("dir/three.rs").to_string(), vec![3..15]),
6308 ])
6309 );
6310
6311 let unicode_case_insensitive_query = SearchQuery::text(
6312 "привет",
6313 false,
6314 false,
6315 false,
6316 Default::default(),
6317 Default::default(),
6318 false,
6319 None,
6320 );
6321 assert_matches!(
6322 unicode_case_insensitive_query,
6323 Ok(SearchQuery::Regex { .. })
6324 );
6325 assert_eq!(
6326 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6327 .await
6328 .unwrap(),
6329 HashMap::from_iter([
6330 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6331 (path!("dir/two.rs").to_string(), vec![3..15]),
6332 (path!("dir/three.rs").to_string(), vec![3..15]),
6333 ])
6334 );
6335
6336 assert_eq!(
6337 search(
6338 &project,
6339 SearchQuery::text(
6340 "привет.",
6341 false,
6342 false,
6343 false,
6344 Default::default(),
6345 Default::default(),
6346 false,
6347 None,
6348 )
6349 .unwrap(),
6350 cx
6351 )
6352 .await
6353 .unwrap(),
6354 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6355 );
6356}
6357
6358#[gpui::test]
6359async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6360 init_test(cx);
6361
6362 let fs = FakeFs::new(cx.executor());
6363 fs.insert_tree(
6364 "/one/two",
6365 json!({
6366 "three": {
6367 "a.txt": "",
6368 "four": {}
6369 },
6370 "c.rs": ""
6371 }),
6372 )
6373 .await;
6374
6375 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6376 project
6377 .update(cx, |project, cx| {
6378 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6379 project.create_entry((id, rel_path("b..")), true, cx)
6380 })
6381 .await
6382 .unwrap()
6383 .into_included()
6384 .unwrap();
6385
6386 assert_eq!(
6387 fs.paths(true),
6388 vec![
6389 PathBuf::from(path!("/")),
6390 PathBuf::from(path!("/one")),
6391 PathBuf::from(path!("/one/two")),
6392 PathBuf::from(path!("/one/two/c.rs")),
6393 PathBuf::from(path!("/one/two/three")),
6394 PathBuf::from(path!("/one/two/three/a.txt")),
6395 PathBuf::from(path!("/one/two/three/b..")),
6396 PathBuf::from(path!("/one/two/three/four")),
6397 ]
6398 );
6399}
6400
6401#[gpui::test]
6402async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6403 init_test(cx);
6404
6405 let fs = FakeFs::new(cx.executor());
6406 fs.insert_tree(
6407 path!("/dir"),
6408 json!({
6409 "a.tsx": "a",
6410 }),
6411 )
6412 .await;
6413
6414 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6415
6416 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6417 language_registry.add(tsx_lang());
6418 let language_server_names = [
6419 "TypeScriptServer",
6420 "TailwindServer",
6421 "ESLintServer",
6422 "NoHoverCapabilitiesServer",
6423 ];
6424 let mut language_servers = [
6425 language_registry.register_fake_lsp(
6426 "tsx",
6427 FakeLspAdapter {
6428 name: language_server_names[0],
6429 capabilities: lsp::ServerCapabilities {
6430 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6431 ..lsp::ServerCapabilities::default()
6432 },
6433 ..FakeLspAdapter::default()
6434 },
6435 ),
6436 language_registry.register_fake_lsp(
6437 "tsx",
6438 FakeLspAdapter {
6439 name: language_server_names[1],
6440 capabilities: lsp::ServerCapabilities {
6441 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6442 ..lsp::ServerCapabilities::default()
6443 },
6444 ..FakeLspAdapter::default()
6445 },
6446 ),
6447 language_registry.register_fake_lsp(
6448 "tsx",
6449 FakeLspAdapter {
6450 name: language_server_names[2],
6451 capabilities: lsp::ServerCapabilities {
6452 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6453 ..lsp::ServerCapabilities::default()
6454 },
6455 ..FakeLspAdapter::default()
6456 },
6457 ),
6458 language_registry.register_fake_lsp(
6459 "tsx",
6460 FakeLspAdapter {
6461 name: language_server_names[3],
6462 capabilities: lsp::ServerCapabilities {
6463 hover_provider: None,
6464 ..lsp::ServerCapabilities::default()
6465 },
6466 ..FakeLspAdapter::default()
6467 },
6468 ),
6469 ];
6470
6471 let (buffer, _handle) = project
6472 .update(cx, |p, cx| {
6473 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6474 })
6475 .await
6476 .unwrap();
6477 cx.executor().run_until_parked();
6478
6479 let mut servers_with_hover_requests = HashMap::default();
6480 for i in 0..language_server_names.len() {
6481 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6482 panic!(
6483 "Failed to get language server #{i} with name {}",
6484 &language_server_names[i]
6485 )
6486 });
6487 let new_server_name = new_server.server.name();
6488 assert!(
6489 !servers_with_hover_requests.contains_key(&new_server_name),
6490 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6491 );
6492 match new_server_name.as_ref() {
6493 "TailwindServer" | "TypeScriptServer" => {
6494 servers_with_hover_requests.insert(
6495 new_server_name.clone(),
6496 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6497 move |_, _| {
6498 let name = new_server_name.clone();
6499 async move {
6500 Ok(Some(lsp::Hover {
6501 contents: lsp::HoverContents::Scalar(
6502 lsp::MarkedString::String(format!("{name} hover")),
6503 ),
6504 range: None,
6505 }))
6506 }
6507 },
6508 ),
6509 );
6510 }
6511 "ESLintServer" => {
6512 servers_with_hover_requests.insert(
6513 new_server_name,
6514 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6515 |_, _| async move { Ok(None) },
6516 ),
6517 );
6518 }
6519 "NoHoverCapabilitiesServer" => {
6520 let _never_handled = new_server
6521 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6522 panic!(
6523 "Should not call for hovers server with no corresponding capabilities"
6524 )
6525 });
6526 }
6527 unexpected => panic!("Unexpected server name: {unexpected}"),
6528 }
6529 }
6530
6531 let hover_task = project.update(cx, |project, cx| {
6532 project.hover(&buffer, Point::new(0, 0), cx)
6533 });
6534 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6535 |mut hover_request| async move {
6536 hover_request
6537 .next()
6538 .await
6539 .expect("All hover requests should have been triggered")
6540 },
6541 ))
6542 .await;
6543 assert_eq!(
6544 vec!["TailwindServer hover", "TypeScriptServer hover"],
6545 hover_task
6546 .await
6547 .into_iter()
6548 .flatten()
6549 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6550 .sorted()
6551 .collect::<Vec<_>>(),
6552 "Should receive hover responses from all related servers with hover capabilities"
6553 );
6554}
6555
6556#[gpui::test]
6557async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6558 init_test(cx);
6559
6560 let fs = FakeFs::new(cx.executor());
6561 fs.insert_tree(
6562 path!("/dir"),
6563 json!({
6564 "a.ts": "a",
6565 }),
6566 )
6567 .await;
6568
6569 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6570
6571 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6572 language_registry.add(typescript_lang());
6573 let mut fake_language_servers = language_registry.register_fake_lsp(
6574 "TypeScript",
6575 FakeLspAdapter {
6576 capabilities: lsp::ServerCapabilities {
6577 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6578 ..lsp::ServerCapabilities::default()
6579 },
6580 ..FakeLspAdapter::default()
6581 },
6582 );
6583
6584 let (buffer, _handle) = project
6585 .update(cx, |p, cx| {
6586 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6587 })
6588 .await
6589 .unwrap();
6590 cx.executor().run_until_parked();
6591
6592 let fake_server = fake_language_servers
6593 .next()
6594 .await
6595 .expect("failed to get the language server");
6596
6597 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6598 move |_, _| async move {
6599 Ok(Some(lsp::Hover {
6600 contents: lsp::HoverContents::Array(vec![
6601 lsp::MarkedString::String("".to_string()),
6602 lsp::MarkedString::String(" ".to_string()),
6603 lsp::MarkedString::String("\n\n\n".to_string()),
6604 ]),
6605 range: None,
6606 }))
6607 },
6608 );
6609
6610 let hover_task = project.update(cx, |project, cx| {
6611 project.hover(&buffer, Point::new(0, 0), cx)
6612 });
6613 let () = request_handled
6614 .next()
6615 .await
6616 .expect("All hover requests should have been triggered");
6617 assert_eq!(
6618 Vec::<String>::new(),
6619 hover_task
6620 .await
6621 .into_iter()
6622 .flatten()
6623 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6624 .sorted()
6625 .collect::<Vec<_>>(),
6626 "Empty hover parts should be ignored"
6627 );
6628}
6629
6630#[gpui::test]
6631async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6632 init_test(cx);
6633
6634 let fs = FakeFs::new(cx.executor());
6635 fs.insert_tree(
6636 path!("/dir"),
6637 json!({
6638 "a.ts": "a",
6639 }),
6640 )
6641 .await;
6642
6643 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6644
6645 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6646 language_registry.add(typescript_lang());
6647 let mut fake_language_servers = language_registry.register_fake_lsp(
6648 "TypeScript",
6649 FakeLspAdapter {
6650 capabilities: lsp::ServerCapabilities {
6651 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6652 ..lsp::ServerCapabilities::default()
6653 },
6654 ..FakeLspAdapter::default()
6655 },
6656 );
6657
6658 let (buffer, _handle) = project
6659 .update(cx, |p, cx| {
6660 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6661 })
6662 .await
6663 .unwrap();
6664 cx.executor().run_until_parked();
6665
6666 let fake_server = fake_language_servers
6667 .next()
6668 .await
6669 .expect("failed to get the language server");
6670
6671 let mut request_handled = fake_server
6672 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6673 Ok(Some(vec![
6674 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6675 title: "organize imports".to_string(),
6676 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6677 ..lsp::CodeAction::default()
6678 }),
6679 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6680 title: "fix code".to_string(),
6681 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6682 ..lsp::CodeAction::default()
6683 }),
6684 ]))
6685 });
6686
6687 let code_actions_task = project.update(cx, |project, cx| {
6688 project.code_actions(
6689 &buffer,
6690 0..buffer.read(cx).len(),
6691 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6692 cx,
6693 )
6694 });
6695
6696 let () = request_handled
6697 .next()
6698 .await
6699 .expect("The code action request should have been triggered");
6700
6701 let code_actions = code_actions_task.await.unwrap().unwrap();
6702 assert_eq!(code_actions.len(), 1);
6703 assert_eq!(
6704 code_actions[0].lsp_action.action_kind(),
6705 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6706 );
6707}
6708
6709#[gpui::test]
6710async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6711 init_test(cx);
6712
6713 let fs = FakeFs::new(cx.executor());
6714 fs.insert_tree(
6715 path!("/dir"),
6716 json!({
6717 "a.tsx": "a",
6718 }),
6719 )
6720 .await;
6721
6722 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6723
6724 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6725 language_registry.add(tsx_lang());
6726 let language_server_names = [
6727 "TypeScriptServer",
6728 "TailwindServer",
6729 "ESLintServer",
6730 "NoActionsCapabilitiesServer",
6731 ];
6732
6733 let mut language_server_rxs = [
6734 language_registry.register_fake_lsp(
6735 "tsx",
6736 FakeLspAdapter {
6737 name: language_server_names[0],
6738 capabilities: lsp::ServerCapabilities {
6739 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6740 ..lsp::ServerCapabilities::default()
6741 },
6742 ..FakeLspAdapter::default()
6743 },
6744 ),
6745 language_registry.register_fake_lsp(
6746 "tsx",
6747 FakeLspAdapter {
6748 name: language_server_names[1],
6749 capabilities: lsp::ServerCapabilities {
6750 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6751 ..lsp::ServerCapabilities::default()
6752 },
6753 ..FakeLspAdapter::default()
6754 },
6755 ),
6756 language_registry.register_fake_lsp(
6757 "tsx",
6758 FakeLspAdapter {
6759 name: language_server_names[2],
6760 capabilities: lsp::ServerCapabilities {
6761 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6762 ..lsp::ServerCapabilities::default()
6763 },
6764 ..FakeLspAdapter::default()
6765 },
6766 ),
6767 language_registry.register_fake_lsp(
6768 "tsx",
6769 FakeLspAdapter {
6770 name: language_server_names[3],
6771 capabilities: lsp::ServerCapabilities {
6772 code_action_provider: None,
6773 ..lsp::ServerCapabilities::default()
6774 },
6775 ..FakeLspAdapter::default()
6776 },
6777 ),
6778 ];
6779
6780 let (buffer, _handle) = project
6781 .update(cx, |p, cx| {
6782 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6783 })
6784 .await
6785 .unwrap();
6786 cx.executor().run_until_parked();
6787
6788 let mut servers_with_actions_requests = HashMap::default();
6789 for i in 0..language_server_names.len() {
6790 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6791 panic!(
6792 "Failed to get language server #{i} with name {}",
6793 &language_server_names[i]
6794 )
6795 });
6796 let new_server_name = new_server.server.name();
6797
6798 assert!(
6799 !servers_with_actions_requests.contains_key(&new_server_name),
6800 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6801 );
6802 match new_server_name.0.as_ref() {
6803 "TailwindServer" | "TypeScriptServer" => {
6804 servers_with_actions_requests.insert(
6805 new_server_name.clone(),
6806 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6807 move |_, _| {
6808 let name = new_server_name.clone();
6809 async move {
6810 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6811 lsp::CodeAction {
6812 title: format!("{name} code action"),
6813 ..lsp::CodeAction::default()
6814 },
6815 )]))
6816 }
6817 },
6818 ),
6819 );
6820 }
6821 "ESLintServer" => {
6822 servers_with_actions_requests.insert(
6823 new_server_name,
6824 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6825 |_, _| async move { Ok(None) },
6826 ),
6827 );
6828 }
6829 "NoActionsCapabilitiesServer" => {
6830 let _never_handled = new_server
6831 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6832 panic!(
6833 "Should not call for code actions server with no corresponding capabilities"
6834 )
6835 });
6836 }
6837 unexpected => panic!("Unexpected server name: {unexpected}"),
6838 }
6839 }
6840
6841 let code_actions_task = project.update(cx, |project, cx| {
6842 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6843 });
6844
6845 // cx.run_until_parked();
6846 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6847 |mut code_actions_request| async move {
6848 code_actions_request
6849 .next()
6850 .await
6851 .expect("All code actions requests should have been triggered")
6852 },
6853 ))
6854 .await;
6855 assert_eq!(
6856 vec!["TailwindServer code action", "TypeScriptServer code action"],
6857 code_actions_task
6858 .await
6859 .unwrap()
6860 .unwrap()
6861 .into_iter()
6862 .map(|code_action| code_action.lsp_action.title().to_owned())
6863 .sorted()
6864 .collect::<Vec<_>>(),
6865 "Should receive code actions responses from all related servers with hover capabilities"
6866 );
6867}
6868
6869#[gpui::test]
6870async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6871 init_test(cx);
6872
6873 let fs = FakeFs::new(cx.executor());
6874 fs.insert_tree(
6875 "/dir",
6876 json!({
6877 "a.rs": "let a = 1;",
6878 "b.rs": "let b = 2;",
6879 "c.rs": "let c = 2;",
6880 }),
6881 )
6882 .await;
6883
6884 let project = Project::test(
6885 fs,
6886 [
6887 "/dir/a.rs".as_ref(),
6888 "/dir/b.rs".as_ref(),
6889 "/dir/c.rs".as_ref(),
6890 ],
6891 cx,
6892 )
6893 .await;
6894
6895 // check the initial state and get the worktrees
6896 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6897 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6898 assert_eq!(worktrees.len(), 3);
6899
6900 let worktree_a = worktrees[0].read(cx);
6901 let worktree_b = worktrees[1].read(cx);
6902 let worktree_c = worktrees[2].read(cx);
6903
6904 // check they start in the right order
6905 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6906 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6907 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6908
6909 (
6910 worktrees[0].clone(),
6911 worktrees[1].clone(),
6912 worktrees[2].clone(),
6913 )
6914 });
6915
6916 // move first worktree to after the second
6917 // [a, b, c] -> [b, a, c]
6918 project
6919 .update(cx, |project, cx| {
6920 let first = worktree_a.read(cx);
6921 let second = worktree_b.read(cx);
6922 project.move_worktree(first.id(), second.id(), cx)
6923 })
6924 .expect("moving first after second");
6925
6926 // check the state after moving
6927 project.update(cx, |project, cx| {
6928 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6929 assert_eq!(worktrees.len(), 3);
6930
6931 let first = worktrees[0].read(cx);
6932 let second = worktrees[1].read(cx);
6933 let third = worktrees[2].read(cx);
6934
6935 // check they are now in the right order
6936 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6937 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6938 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6939 });
6940
6941 // move the second worktree to before the first
6942 // [b, a, c] -> [a, b, c]
6943 project
6944 .update(cx, |project, cx| {
6945 let second = worktree_a.read(cx);
6946 let first = worktree_b.read(cx);
6947 project.move_worktree(first.id(), second.id(), cx)
6948 })
6949 .expect("moving second before first");
6950
6951 // check the state after moving
6952 project.update(cx, |project, cx| {
6953 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6954 assert_eq!(worktrees.len(), 3);
6955
6956 let first = worktrees[0].read(cx);
6957 let second = worktrees[1].read(cx);
6958 let third = worktrees[2].read(cx);
6959
6960 // check they are now in the right order
6961 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6962 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6963 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6964 });
6965
6966 // move the second worktree to after the third
6967 // [a, b, c] -> [a, c, b]
6968 project
6969 .update(cx, |project, cx| {
6970 let second = worktree_b.read(cx);
6971 let third = worktree_c.read(cx);
6972 project.move_worktree(second.id(), third.id(), cx)
6973 })
6974 .expect("moving second after third");
6975
6976 // check the state after moving
6977 project.update(cx, |project, cx| {
6978 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6979 assert_eq!(worktrees.len(), 3);
6980
6981 let first = worktrees[0].read(cx);
6982 let second = worktrees[1].read(cx);
6983 let third = worktrees[2].read(cx);
6984
6985 // check they are now in the right order
6986 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6987 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6988 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6989 });
6990
6991 // move the third worktree to before the second
6992 // [a, c, b] -> [a, b, c]
6993 project
6994 .update(cx, |project, cx| {
6995 let third = worktree_c.read(cx);
6996 let second = worktree_b.read(cx);
6997 project.move_worktree(third.id(), second.id(), cx)
6998 })
6999 .expect("moving third before second");
7000
7001 // check the state after moving
7002 project.update(cx, |project, cx| {
7003 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7004 assert_eq!(worktrees.len(), 3);
7005
7006 let first = worktrees[0].read(cx);
7007 let second = worktrees[1].read(cx);
7008 let third = worktrees[2].read(cx);
7009
7010 // check they are now in the right order
7011 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7012 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7013 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7014 });
7015
7016 // move the first worktree to after the third
7017 // [a, b, c] -> [b, c, a]
7018 project
7019 .update(cx, |project, cx| {
7020 let first = worktree_a.read(cx);
7021 let third = worktree_c.read(cx);
7022 project.move_worktree(first.id(), third.id(), cx)
7023 })
7024 .expect("moving first after third");
7025
7026 // check the state after moving
7027 project.update(cx, |project, cx| {
7028 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7029 assert_eq!(worktrees.len(), 3);
7030
7031 let first = worktrees[0].read(cx);
7032 let second = worktrees[1].read(cx);
7033 let third = worktrees[2].read(cx);
7034
7035 // check they are now in the right order
7036 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7037 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7038 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7039 });
7040
7041 // move the third worktree to before the first
7042 // [b, c, a] -> [a, b, c]
7043 project
7044 .update(cx, |project, cx| {
7045 let third = worktree_a.read(cx);
7046 let first = worktree_b.read(cx);
7047 project.move_worktree(third.id(), first.id(), cx)
7048 })
7049 .expect("moving third before first");
7050
7051 // check the state after moving
7052 project.update(cx, |project, cx| {
7053 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7054 assert_eq!(worktrees.len(), 3);
7055
7056 let first = worktrees[0].read(cx);
7057 let second = worktrees[1].read(cx);
7058 let third = worktrees[2].read(cx);
7059
7060 // check they are now in the right order
7061 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7062 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7063 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7064 });
7065}
7066
7067#[gpui::test]
7068async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7069 init_test(cx);
7070
7071 let staged_contents = r#"
7072 fn main() {
7073 println!("hello world");
7074 }
7075 "#
7076 .unindent();
7077 let file_contents = r#"
7078 // print goodbye
7079 fn main() {
7080 println!("goodbye world");
7081 }
7082 "#
7083 .unindent();
7084
7085 let fs = FakeFs::new(cx.background_executor.clone());
7086 fs.insert_tree(
7087 "/dir",
7088 json!({
7089 ".git": {},
7090 "src": {
7091 "main.rs": file_contents,
7092 }
7093 }),
7094 )
7095 .await;
7096
7097 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7098
7099 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7100
7101 let buffer = project
7102 .update(cx, |project, cx| {
7103 project.open_local_buffer("/dir/src/main.rs", cx)
7104 })
7105 .await
7106 .unwrap();
7107 let unstaged_diff = project
7108 .update(cx, |project, cx| {
7109 project.open_unstaged_diff(buffer.clone(), cx)
7110 })
7111 .await
7112 .unwrap();
7113
7114 cx.run_until_parked();
7115 unstaged_diff.update(cx, |unstaged_diff, cx| {
7116 let snapshot = buffer.read(cx).snapshot();
7117 assert_hunks(
7118 unstaged_diff.hunks(&snapshot, cx),
7119 &snapshot,
7120 &unstaged_diff.base_text_string().unwrap(),
7121 &[
7122 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7123 (
7124 2..3,
7125 " println!(\"hello world\");\n",
7126 " println!(\"goodbye world\");\n",
7127 DiffHunkStatus::modified_none(),
7128 ),
7129 ],
7130 );
7131 });
7132
7133 let staged_contents = r#"
7134 // print goodbye
7135 fn main() {
7136 }
7137 "#
7138 .unindent();
7139
7140 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7141
7142 cx.run_until_parked();
7143 unstaged_diff.update(cx, |unstaged_diff, cx| {
7144 let snapshot = buffer.read(cx).snapshot();
7145 assert_hunks(
7146 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7147 &snapshot,
7148 &unstaged_diff.base_text().text(),
7149 &[(
7150 2..3,
7151 "",
7152 " println!(\"goodbye world\");\n",
7153 DiffHunkStatus::added_none(),
7154 )],
7155 );
7156 });
7157}
7158
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Exercises the uncommitted diff (working copy vs. HEAD). Each hunk's
    // secondary status reflects whether the change is also present in the
    // index (i.e. staged), and a file deleted from the working tree produces
    // a single deletion hunk.
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // Both HEAD and the index also contain `deletion.rs`, which is absent
    // from the working tree — an as-yet-unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is not in the index, so it still has a secondary
    // (unstaged) hunk; the println! change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the deletion hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (by writing an index that omits it).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but its secondary (unstaged) hunk is gone.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7338
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    // Covers the hunk staging state machine: optimistic "pending" statuses
    // while the index write is in flight, the events emitted along the way,
    // rollback when the index write fails, and two staging operations issued
    // back-to-back before either completes.
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is initially unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so emission order can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged
    // (NoSecondaryHunk).
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also goes to the optimistic pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations, before either write completes.
    // (hunks[1] is skipped: it was already staged above.)
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7678
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    // Stages hunks while FS events for earlier index writes are still buffered
    // (via `fs.pause_events`), verifying the optimistic hunk state survives the
    // delayed event delivery. The pinned seeds presumably reproduce specific
    // executor interleavings — TODO confirm against the test's history.
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is initially unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It goes to the optimistic pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7872
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // delays, then verify every hunk's final secondary status matches the last
    // operation applied to it once all IO has settled.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of each hunk's expected secondary status.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk and record the optimistic status we expect it to hold
        // until the corresponding index write completes.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once all IO has settled, each pending status resolves to its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7995
7996#[gpui::test]
7997async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7998 init_test(cx);
7999
8000 let committed_contents = r#"
8001 fn main() {
8002 println!("hello from HEAD");
8003 }
8004 "#
8005 .unindent();
8006 let file_contents = r#"
8007 fn main() {
8008 println!("hello from the working copy");
8009 }
8010 "#
8011 .unindent();
8012
8013 let fs = FakeFs::new(cx.background_executor.clone());
8014 fs.insert_tree(
8015 "/dir",
8016 json!({
8017 ".git": {},
8018 "src": {
8019 "main.rs": file_contents,
8020 }
8021 }),
8022 )
8023 .await;
8024
8025 fs.set_head_for_repo(
8026 Path::new("/dir/.git"),
8027 &[("src/main.rs", committed_contents.clone())],
8028 "deadbeef",
8029 );
8030 fs.set_index_for_repo(
8031 Path::new("/dir/.git"),
8032 &[("src/main.rs", committed_contents.clone())],
8033 );
8034
8035 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8036
8037 let buffer = project
8038 .update(cx, |project, cx| {
8039 project.open_local_buffer("/dir/src/main.rs", cx)
8040 })
8041 .await
8042 .unwrap();
8043 let uncommitted_diff = project
8044 .update(cx, |project, cx| {
8045 project.open_uncommitted_diff(buffer.clone(), cx)
8046 })
8047 .await
8048 .unwrap();
8049
8050 cx.run_until_parked();
8051 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8052 let snapshot = buffer.read(cx).snapshot();
8053 assert_hunks(
8054 uncommitted_diff.hunks(&snapshot, cx),
8055 &snapshot,
8056 &uncommitted_diff.base_text_string().unwrap(),
8057 &[(
8058 1..2,
8059 " println!(\"hello from HEAD\");\n",
8060 " println!(\"hello from the working copy\");\n",
8061 DiffHunkStatus {
8062 kind: DiffHunkStatusKind::Modified,
8063 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8064 },
8065 )],
8066 );
8067 });
8068}
8069
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // With nested repositories (dir1 containing deps/dep1, each with its own
    // .git), a project path should resolve to its *innermost* enclosing
    // repository, and to None when it is outside every repository.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for git scanning so both repositories are known to the git store.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)) pairs.
        // c.txt is outside both repos; a.txt resolves to the inner dep1 repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Remove the outer repository; b.txt should no longer resolve to any repo.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8159
8160#[gpui::test]
8161async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8162 init_test(cx);
8163 let fs = FakeFs::new(cx.background_executor.clone());
8164 let home = paths::home_dir();
8165 fs.insert_tree(
8166 home,
8167 json!({
8168 ".git": {},
8169 "project": {
8170 "a.txt": "A"
8171 },
8172 }),
8173 )
8174 .await;
8175
8176 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8177 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8178 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8179
8180 project
8181 .update(cx, |project, cx| project.git_scans_complete(cx))
8182 .await;
8183 tree.flush_fs_events(cx).await;
8184
8185 project.read_with(cx, |project, cx| {
8186 let containing = project
8187 .git_store()
8188 .read(cx)
8189 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8190 assert!(containing.is_none());
8191 });
8192
8193 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8194 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8195 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8196 project
8197 .update(cx, |project, cx| project.git_scans_complete(cx))
8198 .await;
8199 tree.flush_fs_events(cx).await;
8200
8201 project.read_with(cx, |project, cx| {
8202 let containing = project
8203 .git_store()
8204 .read(cx)
8205 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8206 assert_eq!(
8207 containing
8208 .unwrap()
8209 .0
8210 .read(cx)
8211 .work_directory_abs_path
8212 .as_ref(),
8213 home,
8214 );
8215 });
8216}
8217
// Verifies that file statuses (modified / untracked / deleted) are computed on
// startup and kept up to date as the working copy and the index change.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk (TempTree + RealFs), so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; the scan should pick it up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // c.txt now shows as modified alongside the original three entries.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage and commit everything, which should clear all of the statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8347
8348#[gpui::test]
8349async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8350 init_test(cx);
8351 cx.executor().allow_parking();
8352
8353 let root = TempTree::new(json!({
8354 "project": {
8355 "sub": {},
8356 "a.txt": "",
8357 },
8358 }));
8359
8360 let work_dir = root.path().join("project");
8361 let repo = git_init(work_dir.as_path());
8362 // a.txt exists in HEAD and the working copy but is deleted in the index.
8363 git_add("a.txt", &repo);
8364 git_commit("Initial commit", &repo);
8365 git_remove_index("a.txt".as_ref(), &repo);
8366 // `sub` is a nested git repository.
8367 let _sub = git_init(&work_dir.join("sub"));
8368
8369 let project = Project::test(
8370 Arc::new(RealFs::new(None, cx.executor())),
8371 [root.path()],
8372 cx,
8373 )
8374 .await;
8375
8376 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8377 tree.flush_fs_events(cx).await;
8378 project
8379 .update(cx, |project, cx| project.git_scans_complete(cx))
8380 .await;
8381 cx.executor().run_until_parked();
8382
8383 let repository = project.read_with(cx, |project, cx| {
8384 project
8385 .repositories(cx)
8386 .values()
8387 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8388 .unwrap()
8389 .clone()
8390 });
8391
8392 repository.read_with(cx, |repository, _cx| {
8393 let entries = repository.cached_status().collect::<Vec<_>>();
8394
8395 // `sub` doesn't appear in our computed statuses.
8396 // a.txt appears with a combined `DA` status.
8397 assert_eq!(
8398 entries,
8399 [StatusEntry {
8400 repo_path: repo_path("a.txt"),
8401 status: TrackedStatus {
8402 index_status: StatusCode::Deleted,
8403 worktree_status: StatusCode::Added
8404 }
8405 .into(),
8406 }]
8407 )
8408 });
8409}
8410
8411#[gpui::test]
8412async fn test_repository_subfolder_git_status(
8413 executor: gpui::BackgroundExecutor,
8414 cx: &mut gpui::TestAppContext,
8415) {
8416 init_test(cx);
8417
8418 let fs = FakeFs::new(executor);
8419 fs.insert_tree(
8420 path!("/root"),
8421 json!({
8422 "my-repo": {
8423 ".git": {},
8424 "a.txt": "a",
8425 "sub-folder-1": {
8426 "sub-folder-2": {
8427 "c.txt": "cc",
8428 "d": {
8429 "e.txt": "eee"
8430 }
8431 },
8432 }
8433 },
8434 }),
8435 )
8436 .await;
8437
8438 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8439 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8440
8441 fs.set_status_for_repo(
8442 path!("/root/my-repo/.git").as_ref(),
8443 &[(E_TXT, FileStatus::Untracked)],
8444 );
8445
8446 let project = Project::test(
8447 fs.clone(),
8448 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8449 cx,
8450 )
8451 .await;
8452
8453 project
8454 .update(cx, |project, cx| project.git_scans_complete(cx))
8455 .await;
8456 cx.run_until_parked();
8457
8458 let repository = project.read_with(cx, |project, cx| {
8459 project.repositories(cx).values().next().unwrap().clone()
8460 });
8461
8462 // Ensure that the git status is loaded correctly
8463 repository.read_with(cx, |repository, _cx| {
8464 assert_eq!(
8465 repository.work_directory_abs_path,
8466 Path::new(path!("/root/my-repo")).into()
8467 );
8468
8469 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8470 assert_eq!(
8471 repository
8472 .status_for_path(&repo_path(E_TXT))
8473 .unwrap()
8474 .status,
8475 FileStatus::Untracked
8476 );
8477 });
8478
8479 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8480 project
8481 .update(cx, |project, cx| project.git_scans_complete(cx))
8482 .await;
8483 cx.run_until_parked();
8484
8485 repository.read_with(cx, |repository, _cx| {
8486 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8487 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8488 });
8489}
8490
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on another branch, then cherry-pick it back
    // onto main so the repository ends up in a conflicted cherry-pick state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself reports the conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository model should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick should clear the conflict set.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8573
// Verifies that editing `.gitignore` re-evaluates which entries are ignored,
// and that index updates for newly non-ignored files are picked up.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // Mark .gitignore and a.xml as committed and unmodified; b.txt stays untracked.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // Now a.xml is ignored and b.txt shows as staged (added).
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8641
8642// NOTE:
8643// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8644// a directory which some program has already open.
// This is a limitation of Windows.
8646// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8647// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that when a repository's work directory is renamed on disk, the
// repository model follows the rename and keeps its file statuses intact.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory; statuses should carry over to the new path.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8723
8724// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8725// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
8727// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8728// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end check of git status tracking through a sequence of working-copy
// edits, commits, resets, ignore-rule changes, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt were just committed, so they have no status now.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to cover f.txt as well.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new file in a nested directory; it should show up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the parent directory; the untracked status should follow the file.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8952
8953#[gpui::test]
8954#[ignore]
8955async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8956 init_test(cx);
8957 cx.executor().allow_parking();
8958
8959 const IGNORE_RULE: &str = "**/target";
8960
8961 let root = TempTree::new(json!({
8962 "project": {
8963 "src": {
8964 "main.rs": "fn main() {}"
8965 },
8966 "target": {
8967 "debug": {
8968 "important_text.txt": "important text",
8969 },
8970 },
8971 ".gitignore": IGNORE_RULE
8972 },
8973
8974 }));
8975 let root_path = root.path();
8976
8977 // Set up git repository before creating the worktree.
8978 let work_dir = root.path().join("project");
8979 let repo = git_init(work_dir.as_path());
8980 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8981 git_add("src/main.rs", &repo);
8982 git_add(".gitignore", &repo);
8983 git_commit("Initial commit", &repo);
8984
8985 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8986 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8987 let project_events = Arc::new(Mutex::new(Vec::new()));
8988 project.update(cx, |project, cx| {
8989 let repo_events = repository_updates.clone();
8990 cx.subscribe(project.git_store(), move |_, _, e, _| {
8991 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8992 repo_events.lock().push(e.clone());
8993 }
8994 })
8995 .detach();
8996 let project_events = project_events.clone();
8997 cx.subscribe_self(move |_, e, _| {
8998 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8999 project_events.lock().extend(
9000 updates
9001 .iter()
9002 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9003 .filter(|(path, _)| path != "fs-event-sentinel"),
9004 );
9005 }
9006 })
9007 .detach();
9008 });
9009
9010 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9011 tree.flush_fs_events(cx).await;
9012 tree.update(cx, |tree, cx| {
9013 tree.load_file(
9014 rel_path("project/target/debug/important_text.txt"),
9015 None,
9016 false,
9017 true,
9018 None,
9019 cx,
9020 )
9021 })
9022 .await
9023 .unwrap();
9024 tree.update(cx, |tree, _| {
9025 assert_eq!(
9026 tree.entries(true, 0)
9027 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9028 .collect::<Vec<_>>(),
9029 vec![
9030 (rel_path(""), false),
9031 (rel_path("project/"), false),
9032 (rel_path("project/.gitignore"), false),
9033 (rel_path("project/src"), false),
9034 (rel_path("project/src/main.rs"), false),
9035 (rel_path("project/target"), true),
9036 (rel_path("project/target/debug"), true),
9037 (rel_path("project/target/debug/important_text.txt"), true),
9038 ]
9039 );
9040 });
9041
9042 assert_eq!(
9043 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9044 vec![
9045 RepositoryEvent::StatusesChanged { full_scan: true },
9046 RepositoryEvent::MergeHeadsChanged,
9047 ],
9048 "Initial worktree scan should produce a repo update event"
9049 );
9050 assert_eq!(
9051 project_events.lock().drain(..).collect::<Vec<_>>(),
9052 vec![
9053 ("project/target".to_string(), PathChange::Loaded),
9054 ("project/target/debug".to_string(), PathChange::Loaded),
9055 (
9056 "project/target/debug/important_text.txt".to_string(),
9057 PathChange::Loaded
9058 ),
9059 ],
9060 "Initial project changes should show that all not-ignored and all opened files are loaded"
9061 );
9062
9063 let deps_dir = work_dir.join("target").join("debug").join("deps");
9064 std::fs::create_dir_all(&deps_dir).unwrap();
9065 tree.flush_fs_events(cx).await;
9066 project
9067 .update(cx, |project, cx| project.git_scans_complete(cx))
9068 .await;
9069 cx.executor().run_until_parked();
9070 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9071 tree.flush_fs_events(cx).await;
9072 project
9073 .update(cx, |project, cx| project.git_scans_complete(cx))
9074 .await;
9075 cx.executor().run_until_parked();
9076 std::fs::remove_dir_all(&deps_dir).unwrap();
9077 tree.flush_fs_events(cx).await;
9078 project
9079 .update(cx, |project, cx| project.git_scans_complete(cx))
9080 .await;
9081 cx.executor().run_until_parked();
9082
9083 tree.update(cx, |tree, _| {
9084 assert_eq!(
9085 tree.entries(true, 0)
9086 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9087 .collect::<Vec<_>>(),
9088 vec![
9089 (rel_path(""), false),
9090 (rel_path("project/"), false),
9091 (rel_path("project/.gitignore"), false),
9092 (rel_path("project/src"), false),
9093 (rel_path("project/src/main.rs"), false),
9094 (rel_path("project/target"), true),
9095 (rel_path("project/target/debug"), true),
9096 (rel_path("project/target/debug/important_text.txt"), true),
9097 ],
9098 "No stray temp files should be left after the flycheck changes"
9099 );
9100 });
9101
9102 assert_eq!(
9103 repository_updates
9104 .lock()
9105 .iter()
9106 .cloned()
9107 .collect::<Vec<_>>(),
9108 Vec::new(),
9109 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9110 );
9111 assert_eq!(
9112 project_events.lock().as_slice(),
9113 vec![
9114 ("project/target/debug/deps".to_string(), PathChange::Added),
9115 ("project/target/debug/deps".to_string(), PathChange::Removed),
9116 ],
9117 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9118 No updates for more nested directories should happen as those are ignored",
9119 );
9120}
9121
9122#[gpui::test]
9123async fn test_odd_events_for_ignored_dirs(
9124 executor: BackgroundExecutor,
9125 cx: &mut gpui::TestAppContext,
9126) {
9127 init_test(cx);
9128 let fs = FakeFs::new(executor);
9129 fs.insert_tree(
9130 path!("/root"),
9131 json!({
9132 ".git": {},
9133 ".gitignore": "**/target/",
9134 "src": {
9135 "main.rs": "fn main() {}",
9136 },
9137 "target": {
9138 "debug": {
9139 "foo.txt": "foo",
9140 "deps": {}
9141 }
9142 }
9143 }),
9144 )
9145 .await;
9146 fs.set_head_and_index_for_repo(
9147 path!("/root/.git").as_ref(),
9148 &[
9149 (".gitignore", "**/target/".into()),
9150 ("src/main.rs", "fn main() {}".into()),
9151 ],
9152 );
9153
9154 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9155 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9156 let project_events = Arc::new(Mutex::new(Vec::new()));
9157 project.update(cx, |project, cx| {
9158 let repository_updates = repository_updates.clone();
9159 cx.subscribe(project.git_store(), move |_, _, e, _| {
9160 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9161 repository_updates.lock().push(e.clone());
9162 }
9163 })
9164 .detach();
9165 let project_events = project_events.clone();
9166 cx.subscribe_self(move |_, e, _| {
9167 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9168 project_events.lock().extend(
9169 updates
9170 .iter()
9171 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9172 .filter(|(path, _)| path != "fs-event-sentinel"),
9173 );
9174 }
9175 })
9176 .detach();
9177 });
9178
9179 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9180 tree.update(cx, |tree, cx| {
9181 tree.load_file(
9182 rel_path("target/debug/foo.txt"),
9183 None,
9184 false,
9185 true,
9186 None,
9187 cx,
9188 )
9189 })
9190 .await
9191 .unwrap();
9192 tree.flush_fs_events(cx).await;
9193 project
9194 .update(cx, |project, cx| project.git_scans_complete(cx))
9195 .await;
9196 cx.run_until_parked();
9197 tree.update(cx, |tree, _| {
9198 assert_eq!(
9199 tree.entries(true, 0)
9200 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9201 .collect::<Vec<_>>(),
9202 vec![
9203 (rel_path(""), false),
9204 (rel_path(".gitignore"), false),
9205 (rel_path("src"), false),
9206 (rel_path("src/main.rs"), false),
9207 (rel_path("target"), true),
9208 (rel_path("target/debug"), true),
9209 (rel_path("target/debug/deps"), true),
9210 (rel_path("target/debug/foo.txt"), true),
9211 ]
9212 );
9213 });
9214
9215 assert_eq!(
9216 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9217 vec![
9218 RepositoryEvent::MergeHeadsChanged,
9219 RepositoryEvent::BranchChanged,
9220 RepositoryEvent::StatusesChanged { full_scan: false },
9221 RepositoryEvent::StatusesChanged { full_scan: false },
9222 ],
9223 "Initial worktree scan should produce a repo update event"
9224 );
9225 assert_eq!(
9226 project_events.lock().drain(..).collect::<Vec<_>>(),
9227 vec![
9228 ("target".to_string(), PathChange::Loaded),
9229 ("target/debug".to_string(), PathChange::Loaded),
9230 ("target/debug/deps".to_string(), PathChange::Loaded),
9231 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9232 ],
9233 "All non-ignored entries and all opened firs should be getting a project event",
9234 );
9235
9236 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9237 // This may happen multiple times during a single flycheck, but once is enough for testing.
9238 fs.emit_fs_event("/root/target/debug/deps", None);
9239 tree.flush_fs_events(cx).await;
9240 project
9241 .update(cx, |project, cx| project.git_scans_complete(cx))
9242 .await;
9243 cx.executor().run_until_parked();
9244
9245 assert_eq!(
9246 repository_updates
9247 .lock()
9248 .iter()
9249 .cloned()
9250 .collect::<Vec<_>>(),
9251 Vec::new(),
9252 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9253 );
9254 assert_eq!(
9255 project_events.lock().as_slice(),
9256 Vec::new(),
9257 "No further project events should happen, as only ignored dirs received FS events",
9258 );
9259}
9260
9261#[gpui::test]
9262async fn test_repos_in_invisible_worktrees(
9263 executor: BackgroundExecutor,
9264 cx: &mut gpui::TestAppContext,
9265) {
9266 init_test(cx);
9267 let fs = FakeFs::new(executor);
9268 fs.insert_tree(
9269 path!("/root"),
9270 json!({
9271 "dir1": {
9272 ".git": {},
9273 "dep1": {
9274 ".git": {},
9275 "src": {
9276 "a.txt": "",
9277 },
9278 },
9279 "b.txt": "",
9280 },
9281 }),
9282 )
9283 .await;
9284
9285 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
9286 let _visible_worktree =
9287 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9288 project
9289 .update(cx, |project, cx| project.git_scans_complete(cx))
9290 .await;
9291
9292 let repos = project.read_with(cx, |project, cx| {
9293 project
9294 .repositories(cx)
9295 .values()
9296 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9297 .collect::<Vec<_>>()
9298 });
9299 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9300
9301 let (_invisible_worktree, _) = project
9302 .update(cx, |project, cx| {
9303 project.worktree_store.update(cx, |worktree_store, cx| {
9304 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
9305 })
9306 })
9307 .await
9308 .expect("failed to create worktree");
9309 project
9310 .update(cx, |project, cx| project.git_scans_complete(cx))
9311 .await;
9312
9313 let repos = project.read_with(cx, |project, cx| {
9314 project
9315 .repositories(cx)
9316 .values()
9317 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9318 .collect::<Vec<_>>()
9319 });
9320 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9321}
9322
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that ignored/tracked git state stays correct across rescans as new
    // files appear in tracked dirs, under ancestor-ignored names, and in ignored dirs.
    init_test(cx);
    // Clear file-scan exclusions so entries like `.git` and ignored dirs are
    // visible to the worktree scanner for this test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // `/root/.gitignore` lives *above* the worktree root, so its patterns apply
    // as ancestor ignores to files inside `/root/tree`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; refresh manually so
    // `ignored-dir`'s entries exist in the snapshot before asserting on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: everything matches HEAD/index, so no status; only
    // `ignored-dir` contents are marked ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it (index update), plus new files under an
    // ancestor-ignored name and inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file shows as Added; ignored files (by
    // ancestor or local .gitignore) stay status-less.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9463
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file containing a `gitdir:`
    // pointer into `<main>/.git/worktrees/...`) and submodules (`gitdir:` pointer
    // into `<main>/.git/modules/...`) are each detected as separate repositories,
    // and that git events inside them refresh the right repository.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Linked worktree metadata points back at the main .git dir.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier flushes the repository's pending background work before we
        // assert on its status.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk says "B" -> modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9619
9620#[gpui::test]
9621async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
9622 init_test(cx);
9623 let fs = FakeFs::new(cx.background_executor.clone());
9624 fs.insert_tree(
9625 path!("/root"),
9626 json!({
9627 "project": {
9628 ".git": {},
9629 "child1": {
9630 "a.txt": "A",
9631 },
9632 "child2": {
9633 "b.txt": "B",
9634 }
9635 }
9636 }),
9637 )
9638 .await;
9639
9640 let project = Project::test(
9641 fs.clone(),
9642 [
9643 path!("/root/project/child1").as_ref(),
9644 path!("/root/project/child2").as_ref(),
9645 ],
9646 cx,
9647 )
9648 .await;
9649
9650 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9651 tree.flush_fs_events(cx).await;
9652 project
9653 .update(cx, |project, cx| project.git_scans_complete(cx))
9654 .await;
9655 cx.executor().run_until_parked();
9656
9657 let repos = project.read_with(cx, |project, cx| {
9658 project
9659 .repositories(cx)
9660 .values()
9661 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9662 .collect::<Vec<_>>()
9663 });
9664 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
9665}
9666
9667async fn search(
9668 project: &Entity<Project>,
9669 query: SearchQuery,
9670 cx: &mut gpui::TestAppContext,
9671) -> Result<HashMap<String, Vec<Range<usize>>>> {
9672 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9673 let mut results = HashMap::default();
9674 while let Ok(search_result) = search_rx.recv().await {
9675 match search_result {
9676 SearchResult::Buffer { buffer, ranges } => {
9677 results.entry(buffer).or_insert(ranges);
9678 }
9679 SearchResult::LimitReached => {}
9680 }
9681 }
9682 Ok(results
9683 .into_iter()
9684 .map(|(buffer, ranges)| {
9685 buffer.update(cx, |buffer, cx| {
9686 let path = buffer
9687 .file()
9688 .unwrap()
9689 .full_path(cx)
9690 .to_string_lossy()
9691 .to_string();
9692 let ranges = ranges
9693 .into_iter()
9694 .map(|range| range.to_offset(buffer))
9695 .collect::<Vec<_>>();
9696 (path, ranges)
9697 })
9698 })
9699 .collect())
9700}
9701
9702pub fn init_test(cx: &mut gpui::TestAppContext) {
9703 zlog::init_test();
9704
9705 cx.update(|cx| {
9706 let settings_store = SettingsStore::test(cx);
9707 cx.set_global(settings_store);
9708 release_channel::init(SemanticVersion::default(), cx);
9709 language::init(cx);
9710 Project::init_settings(cx);
9711 });
9712}
9713
9714fn json_lang() -> Arc<Language> {
9715 Arc::new(Language::new(
9716 LanguageConfig {
9717 name: "JSON".into(),
9718 matcher: LanguageMatcher {
9719 path_suffixes: vec!["json".to_string()],
9720 ..Default::default()
9721 },
9722 ..Default::default()
9723 },
9724 None,
9725 ))
9726}
9727
9728fn js_lang() -> Arc<Language> {
9729 Arc::new(Language::new(
9730 LanguageConfig {
9731 name: "JavaScript".into(),
9732 matcher: LanguageMatcher {
9733 path_suffixes: vec!["js".to_string()],
9734 ..Default::default()
9735 },
9736 ..Default::default()
9737 },
9738 None,
9739 ))
9740}
9741
9742fn rust_lang() -> Arc<Language> {
9743 Arc::new(Language::new(
9744 LanguageConfig {
9745 name: "Rust".into(),
9746 matcher: LanguageMatcher {
9747 path_suffixes: vec!["rs".to_string()],
9748 ..Default::default()
9749 },
9750 ..Default::default()
9751 },
9752 Some(tree_sitter_rust::LANGUAGE.into()),
9753 ))
9754}
9755
/// A Python language definition (no grammar) with a fake toolchain lister that
/// reports a "Python Venv" toolchain for every `.venv` directory found in the
/// ancestors of the queried subroot, using the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test double for toolchain discovery; backed by the FakeFs passed in.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // A `.venv` dir at any ancestor level counts as a toolchain.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented; tests only exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9825
9826fn typescript_lang() -> Arc<Language> {
9827 Arc::new(Language::new(
9828 LanguageConfig {
9829 name: "TypeScript".into(),
9830 matcher: LanguageMatcher {
9831 path_suffixes: vec!["ts".to_string()],
9832 ..Default::default()
9833 },
9834 ..Default::default()
9835 },
9836 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9837 ))
9838}
9839
9840fn tsx_lang() -> Arc<Language> {
9841 Arc::new(Language::new(
9842 LanguageConfig {
9843 name: "tsx".into(),
9844 matcher: LanguageMatcher {
9845 path_suffixes: vec!["tsx".to_string()],
9846 ..Default::default()
9847 },
9848 ..Default::default()
9849 },
9850 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9851 ))
9852}
9853
9854fn get_all_tasks(
9855 project: &Entity<Project>,
9856 task_contexts: Arc<TaskContexts>,
9857 cx: &mut App,
9858) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9859 let new_tasks = project.update(cx, |project, cx| {
9860 project.task_store.update(cx, |task_store, cx| {
9861 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9862 this.used_and_current_resolved_tasks(task_contexts, cx)
9863 })
9864 })
9865 });
9866
9867 cx.background_spawn(async move {
9868 let (mut old, new) = new_tasks.await;
9869 old.extend(new);
9870 old
9871 })
9872}
9873
9874#[track_caller]
9875fn assert_entry_git_state(
9876 tree: &Worktree,
9877 repository: &Repository,
9878 path: &str,
9879 index_status: Option<StatusCode>,
9880 is_ignored: bool,
9881) {
9882 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9883 let entry = tree
9884 .entry_for_path(&rel_path(path))
9885 .unwrap_or_else(|| panic!("entry {path} not found"));
9886 let status = repository
9887 .status_for_path(&repo_path(path))
9888 .map(|entry| entry.status);
9889 let expected = index_status.map(|index_status| {
9890 TrackedStatus {
9891 index_status,
9892 worktree_status: StatusCode::Unmodified,
9893 }
9894 .into()
9895 });
9896 assert_eq!(
9897 status, expected,
9898 "expected {path} to have git status: {expected:?}"
9899 );
9900 assert_eq!(
9901 entry.is_ignored, is_ignored,
9902 "expected {path} to have is_ignored: {is_ignored}"
9903 );
9904}
9905
9906#[track_caller]
9907fn git_init(path: &Path) -> git2::Repository {
9908 let mut init_opts = RepositoryInitOptions::new();
9909 init_opts.initial_head("main");
9910 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9911}
9912
9913#[track_caller]
9914fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9915 let path = path.as_ref();
9916 let mut index = repo.index().expect("Failed to get index");
9917 index.add_path(path).expect("Failed to add file");
9918 index.write().expect("Failed to write index");
9919}
9920
9921#[track_caller]
9922fn git_remove_index(path: &Path, repo: &git2::Repository) {
9923 let mut index = repo.index().expect("Failed to get index");
9924 index.remove_path(path).expect("Failed to add file");
9925 index.write().expect("Failed to write index");
9926}
9927
9928#[track_caller]
9929fn git_commit(msg: &'static str, repo: &git2::Repository) {
9930 use git2::Signature;
9931
9932 let signature = Signature::now("test", "test@zed.dev").unwrap();
9933 let oid = repo.index().unwrap().write_tree().unwrap();
9934 let tree = repo.find_tree(oid).unwrap();
9935 if let Ok(head) = repo.head() {
9936 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9937
9938 let parent_commit = parent_obj.as_commit().unwrap();
9939
9940 repo.commit(
9941 Some("HEAD"),
9942 &signature,
9943 &signature,
9944 msg,
9945 &tree,
9946 &[parent_commit],
9947 )
9948 .expect("Failed to commit with parent");
9949 } else {
9950 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9951 .expect("Failed to commit");
9952 }
9953}
9954
// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
// ad-hoc use when writing new git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9960
9961#[track_caller]
9962fn git_stash(repo: &mut git2::Repository) {
9963 use git2::Signature;
9964
9965 let signature = Signature::now("test", "test@zed.dev").unwrap();
9966 repo.stash_save(&signature, "N/A", None)
9967 .expect("Failed to stash");
9968}
9969
9970#[track_caller]
9971fn git_reset(offset: usize, repo: &git2::Repository) {
9972 let head = repo.head().expect("Couldn't get repo head");
9973 let object = head.peel(git2::ObjectType::Commit).unwrap();
9974 let commit = object.as_commit().unwrap();
9975 let new_head = commit
9976 .parents()
9977 .inspect(|parnet| {
9978 parnet.message();
9979 })
9980 .nth(offset)
9981 .expect("Not enough history");
9982 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9983 .expect("Could not reset");
9984}
9985
// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
// ad-hoc use when writing new git tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` at the current HEAD commit without switching to it.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed panic message: this creates a branch, it does not commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9996
// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
// ad-hoc use when writing new git tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Point HEAD at `name` (e.g. a "refs/heads/..." ref), then update the working tree.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10003
// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
// ad-hoc use when writing new git tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
10013
10014#[gpui::test]
10015async fn test_find_project_path_abs(
10016 background_executor: BackgroundExecutor,
10017 cx: &mut gpui::TestAppContext,
10018) {
10019 // find_project_path should work with absolute paths
10020 init_test(cx);
10021
10022 let fs = FakeFs::new(background_executor);
10023 fs.insert_tree(
10024 path!("/root"),
10025 json!({
10026 "project1": {
10027 "file1.txt": "content1",
10028 "subdir": {
10029 "file2.txt": "content2"
10030 }
10031 },
10032 "project2": {
10033 "file3.txt": "content3"
10034 }
10035 }),
10036 )
10037 .await;
10038
10039 let project = Project::test(
10040 fs.clone(),
10041 [
10042 path!("/root/project1").as_ref(),
10043 path!("/root/project2").as_ref(),
10044 ],
10045 cx,
10046 )
10047 .await;
10048
10049 // Make sure the worktrees are fully initialized
10050 project
10051 .update(cx, |project, cx| project.git_scans_complete(cx))
10052 .await;
10053 cx.run_until_parked();
10054
10055 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
10056 project.read_with(cx, |project, cx| {
10057 let worktrees: Vec<_> = project.worktrees(cx).collect();
10058 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
10059 let id1 = worktrees[0].read(cx).id();
10060 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
10061 let id2 = worktrees[1].read(cx).id();
10062 (abs_path1, id1, abs_path2, id2)
10063 });
10064
10065 project.update(cx, |project, cx| {
10066 let abs_path = project1_abs_path.join("file1.txt");
10067 let found_path = project.find_project_path(abs_path, cx).unwrap();
10068 assert_eq!(found_path.worktree_id, project1_id);
10069 assert_eq!(&*found_path.path, rel_path("file1.txt"));
10070
10071 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
10072 let found_path = project.find_project_path(abs_path, cx).unwrap();
10073 assert_eq!(found_path.worktree_id, project1_id);
10074 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
10075
10076 let abs_path = project2_abs_path.join("file3.txt");
10077 let found_path = project.find_project_path(abs_path, cx).unwrap();
10078 assert_eq!(found_path.worktree_id, project2_id);
10079 assert_eq!(&*found_path.path, rel_path("file3.txt"));
10080
10081 let abs_path = project1_abs_path.join("nonexistent.txt");
10082 let found_path = project.find_project_path(abs_path, cx);
10083 assert!(
10084 found_path.is_some(),
10085 "Should find project path for nonexistent file in worktree"
10086 );
10087
10088 // Test with an absolute path outside any worktree
10089 let abs_path = Path::new("/some/other/path");
10090 let found_path = project.find_project_path(abs_path, cx);
10091 assert!(
10092 found_path.is_none(),
10093 "Should not find project path for path outside any worktree"
10094 );
10095 });
10096}