1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use encodings::{Encoding, UTF_8};
16use fs::FakeFs;
17use futures::{StreamExt, future};
18use git::{
19 GitHostingProviderRegistry,
20 repository::{RepoPath, repo_path},
21 status::{StatusCode, TrackedStatus},
22};
23use git2::RepositoryInitOptions;
24use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
25use itertools::Itertools;
26use language::{
27 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
28 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
29 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
30 ToolchainLister,
31 language_settings::{LanguageSettingsContent, language_settings},
32 tree_sitter_rust, tree_sitter_typescript,
33};
34use lsp::{
35 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
36 Uri, WillRenameFiles, notification::DidRenameFiles,
37};
38use parking_lot::Mutex;
39use paths::{config_dir, global_gitignore_path, tasks_file};
40use postage::stream::Stream as _;
41use pretty_assertions::{assert_eq, assert_matches};
42use rand::{Rng as _, rngs::StdRng};
43use serde_json::json;
44#[cfg(not(windows))]
45use std::os;
46use std::{
47 env, mem,
48 num::NonZeroU32,
49 ops::Range,
50 str::FromStr,
51 sync::{Arc, OnceLock},
52 task::Poll,
53};
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
// Verifies .editorconfig support: the root .editorconfig overrides
// .zed/settings.json, a nested .editorconfig overrides the root one,
// "tab_width" is used when "indent_size" is unset, and values set to
// "off" fall back to the .zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project can read it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree,
        // taking both .zed settings and .editorconfig files into account.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
248#[gpui::test]
249async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
250 init_test(cx);
251 cx.update(|cx| {
252 GitHostingProviderRegistry::default_global(cx);
253 git_hosting_providers::init(cx);
254 });
255
256 let fs = FakeFs::new(cx.executor());
257 let str_path = path!("/dir");
258 let path = Path::new(str_path);
259
260 fs.insert_tree(
261 path!("/dir"),
262 json!({
263 ".zed": {
264 "settings.json": r#"{
265 "git_hosting_providers": [
266 {
267 "provider": "gitlab",
268 "base_url": "https://google.com",
269 "name": "foo"
270 }
271 ]
272 }"#
273 },
274 }),
275 )
276 .await;
277
278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
279 let (_worktree, _) =
280 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
281 cx.executor().run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292
293 fs.atomic_write(
294 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
295 "{}".into(),
296 )
297 .await
298 .unwrap();
299
300 cx.run_until_parked();
301
302 cx.update(|cx| {
303 let provider = GitHostingProviderRegistry::global(cx);
304 assert!(
305 !provider
306 .list_hosting_providers()
307 .into_iter()
308 .any(|provider| provider.name() == "foo")
309 );
310 });
311}
312
// Verifies per-directory project settings and tasks: `.zed/settings.json` and
// `.zed/tasks.json` at the worktree root and in a subdirectory are both picked
// up, task ordering prefers most-recently-scheduled and more-specific sources,
// and global (file-based) tasks are appended after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a context that only has the active worktree set.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The source kind for tasks defined in the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings from the nested `.zed/settings.json` override the root ones.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the nested directory's task sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled and add a global task file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task comes last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
// Verifies that a task referencing $ZED_WORKTREE_ROOT fails to resolve when no
// worktree context is active, but resolves (with the variable substituted) once
// a worktree context providing WorktreeRoot is supplied.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // No active worktree context: ZED_WORKTREE_ROOT cannot be substituted.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context that defines WorktreeRoot, the task resolves.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    // The resolved command has the variable expanded to the worktree root.
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
// Verifies that one language server adapter ("ty") can run as multiple
// instances within a single worktree: both subprojects initially share the
// server rooted at their manifest, and activating a different toolchain for
// one subproject spawns a second server instance with a new id.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: walks up from a path looking for
    // `pyproject.toml` to determine the rooting point of a Python subproject.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Check up to `depth` ancestors for a `pyproject.toml` file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own venv and manifest.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's manifest.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain has been activated for project-b yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree(
1217 path!("/the-root"),
1218 json!({
1219 ".gitignore": "target\n",
1220 "Cargo.lock": "",
1221 "src": {
1222 "a.rs": "",
1223 "b.rs": "",
1224 },
1225 "target": {
1226 "x": {
1227 "out": {
1228 "x.rs": ""
1229 }
1230 },
1231 "y": {
1232 "out": {
1233 "y.rs": "",
1234 }
1235 },
1236 "z": {
1237 "out": {
1238 "z.rs": ""
1239 }
1240 }
1241 }
1242 }),
1243 )
1244 .await;
1245 fs.insert_tree(
1246 path!("/the-registry"),
1247 json!({
1248 "dep1": {
1249 "src": {
1250 "dep1.rs": "",
1251 }
1252 },
1253 "dep2": {
1254 "src": {
1255 "dep2.rs": "",
1256 }
1257 },
1258 }),
1259 )
1260 .await;
1261 fs.insert_tree(
1262 path!("/the/stdlib"),
1263 json!({
1264 "LICENSE": "",
1265 "src": {
1266 "string.rs": "",
1267 }
1268 }),
1269 )
1270 .await;
1271
1272 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1273 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1274 (project.languages().clone(), project.lsp_store())
1275 });
1276 language_registry.add(rust_lang());
1277 let mut fake_servers = language_registry.register_fake_lsp(
1278 "Rust",
1279 FakeLspAdapter {
1280 name: "the-language-server",
1281 ..Default::default()
1282 },
1283 );
1284
1285 cx.executor().run_until_parked();
1286
1287 // Start the language server by opening a buffer with a compatible file extension.
1288 project
1289 .update(cx, |project, cx| {
1290 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1291 })
1292 .await
1293 .unwrap();
1294
1295 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1296 project.update(cx, |project, cx| {
1297 let worktree = project.worktrees(cx).next().unwrap();
1298 assert_eq!(
1299 worktree
1300 .read(cx)
1301 .snapshot()
1302 .entries(true, 0)
1303 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("", false),
1307 (".gitignore", false),
1308 ("Cargo.lock", false),
1309 ("src", false),
1310 ("src/a.rs", false),
1311 ("src/b.rs", false),
1312 ("target", true),
1313 ]
1314 );
1315 });
1316
1317 let prev_read_dir_count = fs.read_dir_call_count();
1318
1319 let fake_server = fake_servers.next().await.unwrap();
1320 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1321 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1322 id
1323 });
1324
1325 // Simulate jumping to a definition in a dependency outside of the worktree.
1326 let _out_of_worktree_buffer = project
1327 .update(cx, |project, cx| {
1328 project.open_local_buffer_via_lsp(
1329 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1330 server_id,
1331 cx,
1332 )
1333 })
1334 .await
1335 .unwrap();
1336
1337 // Keep track of the FS events reported to the language server.
1338 let file_changes = Arc::new(Mutex::new(Vec::new()));
1339 fake_server
1340 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1341 registrations: vec![lsp::Registration {
1342 id: Default::default(),
1343 method: "workspace/didChangeWatchedFiles".to_string(),
1344 register_options: serde_json::to_value(
1345 lsp::DidChangeWatchedFilesRegistrationOptions {
1346 watchers: vec![
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/Cargo.toml").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/src/*.{rs,c}").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the-root/target/y/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("/the/stdlib/src/**/*.rs").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 lsp::FileSystemWatcher {
1372 glob_pattern: lsp::GlobPattern::String(
1373 path!("**/Cargo.lock").to_string(),
1374 ),
1375 kind: None,
1376 },
1377 ],
1378 },
1379 )
1380 .ok(),
1381 }],
1382 })
1383 .await
1384 .into_response()
1385 .unwrap();
1386 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1387 let file_changes = file_changes.clone();
1388 move |params, _| {
1389 let mut file_changes = file_changes.lock();
1390 file_changes.extend(params.changes);
1391 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1392 }
1393 });
1394
1395 cx.executor().run_until_parked();
1396 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1397 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1398
1399 let mut new_watched_paths = fs.watched_paths();
1400 new_watched_paths.retain(|path| {
1401 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1402 });
1403 assert_eq!(
1404 &new_watched_paths,
1405 &[
1406 Path::new(path!("/the-root")),
1407 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1408 Path::new(path!("/the/stdlib/src"))
1409 ]
1410 );
1411
1412 // Now the language server has asked us to watch an ignored directory path,
1413 // so we recursively load it.
1414 project.update(cx, |project, cx| {
1415 let worktree = project.visible_worktrees(cx).next().unwrap();
1416 assert_eq!(
1417 worktree
1418 .read(cx)
1419 .snapshot()
1420 .entries(true, 0)
1421 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1422 .collect::<Vec<_>>(),
1423 &[
1424 ("", false),
1425 (".gitignore", false),
1426 ("Cargo.lock", false),
1427 ("src", false),
1428 ("src/a.rs", false),
1429 ("src/b.rs", false),
1430 ("target", true),
1431 ("target/x", true),
1432 ("target/y", true),
1433 ("target/y/out", true),
1434 ("target/y/out/y.rs", true),
1435 ("target/z", true),
1436 ]
1437 );
1438 });
1439
1440 // Perform some file system mutations, two of which match the watched patterns,
1441 // and one of which does not.
1442 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1443 .await
1444 .unwrap();
1445 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1446 .await
1447 .unwrap();
1448 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1449 .await
1450 .unwrap();
1451 fs.create_file(
1452 path!("/the-root/target/x/out/x2.rs").as_ref(),
1453 Default::default(),
1454 )
1455 .await
1456 .unwrap();
1457 fs.create_file(
1458 path!("/the-root/target/y/out/y2.rs").as_ref(),
1459 Default::default(),
1460 )
1461 .await
1462 .unwrap();
1463
1464 let encoding = Encoding::default();
1465
1466 fs.save(
1467 path!("/the-root/Cargo.lock").as_ref(),
1468 &Rope::default(),
1469 Default::default(),
1470 encoding.clone(),
1471 )
1472 .await
1473 .unwrap();
1474 fs.save(
1475 path!("/the-stdlib/LICENSE").as_ref(),
1476 &Rope::default(),
1477 Default::default(),
1478 encoding.clone(),
1479 )
1480 .await
1481 .unwrap();
1482 fs.save(
1483 path!("/the/stdlib/src/string.rs").as_ref(),
1484 &Rope::default(),
1485 Default::default(),
1486 encoding,
1487 )
1488 .await
1489 .unwrap();
1490
1491 // The language server receives events for the FS mutations that match its watch patterns.
1492 cx.executor().run_until_parked();
1493 assert_eq!(
1494 &*file_changes.lock(),
1495 &[
1496 lsp::FileEvent {
1497 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1498 typ: lsp::FileChangeType::CHANGED,
1499 },
1500 lsp::FileEvent {
1501 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1502 typ: lsp::FileChangeType::DELETED,
1503 },
1504 lsp::FileEvent {
1505 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1506 typ: lsp::FileChangeType::CREATED,
1507 },
1508 lsp::FileEvent {
1509 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1510 typ: lsp::FileChangeType::CREATED,
1511 },
1512 lsp::FileEvent {
1513 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1514 typ: lsp::FileChangeType::CHANGED,
1515 },
1516 ]
1517 );
1518}
1519
1520#[gpui::test]
1521async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1522 init_test(cx);
1523
1524 let fs = FakeFs::new(cx.executor());
1525 fs.insert_tree(
1526 path!("/dir"),
1527 json!({
1528 "a.rs": "let a = 1;",
1529 "b.rs": "let b = 2;"
1530 }),
1531 )
1532 .await;
1533
1534 let project = Project::test(
1535 fs,
1536 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1537 cx,
1538 )
1539 .await;
1540 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1541
1542 let buffer_a = project
1543 .update(cx, |project, cx| {
1544 project.open_local_buffer(path!("/dir/a.rs"), cx)
1545 })
1546 .await
1547 .unwrap();
1548 let buffer_b = project
1549 .update(cx, |project, cx| {
1550 project.open_local_buffer(path!("/dir/b.rs"), cx)
1551 })
1552 .await
1553 .unwrap();
1554
1555 lsp_store.update(cx, |lsp_store, cx| {
1556 lsp_store
1557 .update_diagnostics(
1558 LanguageServerId(0),
1559 lsp::PublishDiagnosticsParams {
1560 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1561 version: None,
1562 diagnostics: vec![lsp::Diagnostic {
1563 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1564 severity: Some(lsp::DiagnosticSeverity::ERROR),
1565 message: "error 1".to_string(),
1566 ..Default::default()
1567 }],
1568 },
1569 None,
1570 DiagnosticSourceKind::Pushed,
1571 &[],
1572 cx,
1573 )
1574 .unwrap();
1575 lsp_store
1576 .update_diagnostics(
1577 LanguageServerId(0),
1578 lsp::PublishDiagnosticsParams {
1579 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1580 version: None,
1581 diagnostics: vec![lsp::Diagnostic {
1582 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1583 severity: Some(DiagnosticSeverity::WARNING),
1584 message: "error 2".to_string(),
1585 ..Default::default()
1586 }],
1587 },
1588 None,
1589 DiagnosticSourceKind::Pushed,
1590 &[],
1591 cx,
1592 )
1593 .unwrap();
1594 });
1595
1596 buffer_a.update(cx, |buffer, _| {
1597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1598 assert_eq!(
1599 chunks
1600 .iter()
1601 .map(|(s, d)| (s.as_str(), *d))
1602 .collect::<Vec<_>>(),
1603 &[
1604 ("let ", None),
1605 ("a", Some(DiagnosticSeverity::ERROR)),
1606 (" = 1;", None),
1607 ]
1608 );
1609 });
1610 buffer_b.update(cx, |buffer, _| {
1611 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1612 assert_eq!(
1613 chunks
1614 .iter()
1615 .map(|(s, d)| (s.as_str(), *d))
1616 .collect::<Vec<_>>(),
1617 &[
1618 ("let ", None),
1619 ("b", Some(DiagnosticSeverity::WARNING)),
1620 (" = 2;", None),
1621 ]
1622 );
1623 });
1624}
1625
1626#[gpui::test]
1627async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1628 init_test(cx);
1629
1630 let fs = FakeFs::new(cx.executor());
1631 fs.insert_tree(
1632 path!("/root"),
1633 json!({
1634 "dir": {
1635 ".git": {
1636 "HEAD": "ref: refs/heads/main",
1637 },
1638 ".gitignore": "b.rs",
1639 "a.rs": "let a = 1;",
1640 "b.rs": "let b = 2;",
1641 },
1642 "other.rs": "let b = c;"
1643 }),
1644 )
1645 .await;
1646
1647 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1648 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1649 let (worktree, _) = project
1650 .update(cx, |project, cx| {
1651 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1652 })
1653 .await
1654 .unwrap();
1655 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1656
1657 let (worktree, _) = project
1658 .update(cx, |project, cx| {
1659 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1660 })
1661 .await
1662 .unwrap();
1663 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1664
1665 let server_id = LanguageServerId(0);
1666 lsp_store.update(cx, |lsp_store, cx| {
1667 lsp_store
1668 .update_diagnostics(
1669 server_id,
1670 lsp::PublishDiagnosticsParams {
1671 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1672 version: None,
1673 diagnostics: vec![lsp::Diagnostic {
1674 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1675 severity: Some(lsp::DiagnosticSeverity::ERROR),
1676 message: "unused variable 'b'".to_string(),
1677 ..Default::default()
1678 }],
1679 },
1680 None,
1681 DiagnosticSourceKind::Pushed,
1682 &[],
1683 cx,
1684 )
1685 .unwrap();
1686 lsp_store
1687 .update_diagnostics(
1688 server_id,
1689 lsp::PublishDiagnosticsParams {
1690 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1691 version: None,
1692 diagnostics: vec![lsp::Diagnostic {
1693 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1694 severity: Some(lsp::DiagnosticSeverity::ERROR),
1695 message: "unknown variable 'c'".to_string(),
1696 ..Default::default()
1697 }],
1698 },
1699 None,
1700 DiagnosticSourceKind::Pushed,
1701 &[],
1702 cx,
1703 )
1704 .unwrap();
1705 });
1706
1707 let main_ignored_buffer = project
1708 .update(cx, |project, cx| {
1709 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1710 })
1711 .await
1712 .unwrap();
1713 main_ignored_buffer.update(cx, |buffer, _| {
1714 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1715 assert_eq!(
1716 chunks
1717 .iter()
1718 .map(|(s, d)| (s.as_str(), *d))
1719 .collect::<Vec<_>>(),
1720 &[
1721 ("let ", None),
1722 ("b", Some(DiagnosticSeverity::ERROR)),
1723 (" = 2;", None),
1724 ],
1725 "Gigitnored buffers should still get in-buffer diagnostics",
1726 );
1727 });
1728 let other_buffer = project
1729 .update(cx, |project, cx| {
1730 project.open_buffer((other_worktree_id, rel_path("")), cx)
1731 })
1732 .await
1733 .unwrap();
1734 other_buffer.update(cx, |buffer, _| {
1735 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1736 assert_eq!(
1737 chunks
1738 .iter()
1739 .map(|(s, d)| (s.as_str(), *d))
1740 .collect::<Vec<_>>(),
1741 &[
1742 ("let b = ", None),
1743 ("c", Some(DiagnosticSeverity::ERROR)),
1744 (";", None),
1745 ],
1746 "Buffers from hidden projects should still get in-buffer diagnostics"
1747 );
1748 });
1749
1750 project.update(cx, |project, cx| {
1751 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1752 assert_eq!(
1753 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1754 vec![(
1755 ProjectPath {
1756 worktree_id: main_worktree_id,
1757 path: rel_path("b.rs").into(),
1758 },
1759 server_id,
1760 DiagnosticSummary {
1761 error_count: 1,
1762 warning_count: 0,
1763 }
1764 )]
1765 );
1766 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1767 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1768 });
1769}
1770
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence around disk-based diagnostics:
    // DiskBasedDiagnosticsStarted when the server begins work under its
    // configured progress token, DiagnosticsUpdated per publish, and
    // DiskBasedDiagnosticsFinished when the token ends. Also checks that
    // publishing empty diagnostics twice in a row yields only one update
    // event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so that `progress_token` marks disk-based
    // diagnostics work.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits
    // DiskBasedDiagnosticsStarted (preceded by an inlay-hint refresh for the
    // newly added server).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs emits DiagnosticsUpdated for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the diagnostic published above.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1910
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarts a language server while its disk-based diagnostics progress
    // token is still open, and verifies the emitted event sequence: the old
    // server is removed, the new one is added and re-registers the buffer,
    // and ending progress on the NEW server alone is enough to mark
    // diagnostics finished — the old server's progress never completed.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2014
2015#[gpui::test]
2016async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2017 init_test(cx);
2018
2019 let fs = FakeFs::new(cx.executor());
2020 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2021
2022 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2023
2024 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2025 language_registry.add(rust_lang());
2026 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2027
2028 let (buffer, _) = project
2029 .update(cx, |project, cx| {
2030 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2031 })
2032 .await
2033 .unwrap();
2034
2035 // Publish diagnostics
2036 let fake_server = fake_servers.next().await.unwrap();
2037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2038 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2039 version: None,
2040 diagnostics: vec![lsp::Diagnostic {
2041 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2042 severity: Some(lsp::DiagnosticSeverity::ERROR),
2043 message: "the message".to_string(),
2044 ..Default::default()
2045 }],
2046 });
2047
2048 cx.executor().run_until_parked();
2049 buffer.update(cx, |buffer, _| {
2050 assert_eq!(
2051 buffer
2052 .snapshot()
2053 .diagnostics_in_range::<_, usize>(0..1, false)
2054 .map(|entry| entry.diagnostic.message.clone())
2055 .collect::<Vec<_>>(),
2056 ["the message".to_string()]
2057 );
2058 });
2059 project.update(cx, |project, cx| {
2060 assert_eq!(
2061 project.diagnostic_summary(false, cx),
2062 DiagnosticSummary {
2063 error_count: 1,
2064 warning_count: 0,
2065 }
2066 );
2067 });
2068
2069 project.update(cx, |project, cx| {
2070 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2071 });
2072
2073 // The diagnostics are cleared.
2074 cx.executor().run_until_parked();
2075 buffer.update(cx, |buffer, _| {
2076 assert_eq!(
2077 buffer
2078 .snapshot()
2079 .diagnostics_in_range::<_, usize>(0..1, false)
2080 .map(|entry| entry.diagnostic.message.clone())
2081 .collect::<Vec<_>>(),
2082 Vec::<String>::new(),
2083 );
2084 });
2085 project.update(cx, |project, cx| {
2086 assert_eq!(
2087 project.diagnostic_summary(false, cx),
2088 DiagnosticSummary {
2089 error_count: 0,
2090 warning_count: 0,
2091 }
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2102
2103 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2104 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2105
2106 language_registry.add(rust_lang());
2107 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2108
2109 let (buffer, _handle) = project
2110 .update(cx, |project, cx| {
2111 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2112 })
2113 .await
2114 .unwrap();
2115
2116 // Before restarting the server, report diagnostics with an unknown buffer version.
2117 let fake_server = fake_servers.next().await.unwrap();
2118 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2119 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2120 version: Some(10000),
2121 diagnostics: Vec::new(),
2122 });
2123 cx.executor().run_until_parked();
2124 project.update(cx, |project, cx| {
2125 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2126 });
2127
2128 let mut fake_server = fake_servers.next().await.unwrap();
2129 let notification = fake_server
2130 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2131 .await
2132 .text_document;
2133 assert_eq!(notification.version, 0);
2134}
2135
2136#[gpui::test]
2137async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2138 init_test(cx);
2139
2140 let progress_token = "the-progress-token";
2141
2142 let fs = FakeFs::new(cx.executor());
2143 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2144
2145 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2146
2147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2148 language_registry.add(rust_lang());
2149 let mut fake_servers = language_registry.register_fake_lsp(
2150 "Rust",
2151 FakeLspAdapter {
2152 name: "the-language-server",
2153 disk_based_diagnostics_sources: vec!["disk".into()],
2154 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2155 ..Default::default()
2156 },
2157 );
2158
2159 let (buffer, _handle) = project
2160 .update(cx, |project, cx| {
2161 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2162 })
2163 .await
2164 .unwrap();
2165
2166 // Simulate diagnostics starting to update.
2167 let mut fake_server = fake_servers.next().await.unwrap();
2168 fake_server
2169 .start_progress_with(
2170 "another-token",
2171 lsp::WorkDoneProgressBegin {
2172 cancellable: Some(false),
2173 ..Default::default()
2174 },
2175 )
2176 .await;
2177 fake_server
2178 .start_progress_with(
2179 progress_token,
2180 lsp::WorkDoneProgressBegin {
2181 cancellable: Some(true),
2182 ..Default::default()
2183 },
2184 )
2185 .await;
2186 cx.executor().run_until_parked();
2187
2188 project.update(cx, |project, cx| {
2189 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2190 });
2191
2192 let cancel_notification = fake_server
2193 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2194 .await;
2195 assert_eq!(
2196 cancel_notification.token,
2197 NumberOrString::String(progress_token.into())
2198 );
2199}
2200
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping the per-language `enable_language_server`
    // setting stops and restarts only the matching server: disabling Rust
    // exits the Rust server without touching the JavaScript one, and
    // re-enabling Rust while disabling JavaScript starts a fresh Rust server
    // and exits the JavaScript one.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The new Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2318
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published by a language server are anchored to the document
    // version they were computed against. This test verifies that they are
    // translated through subsequent buffer edits: edits made before a publish
    // shift the reported ranges, and edits made after a publish keep the
    // diagnostics attached to the text they describe.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostic source so entries carry
    // `is_disk_based: true` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Row indices in the asserts are original row + 2, matching the two
    // newlines inserted above.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    // NOTE: group ids keep increasing across successive publishes for the
    // same buffer (1,2 above; 3,4 here; 5,6 below), as the asserts pin down.
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2610
2611#[gpui::test]
2612async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2613 init_test(cx);
2614
2615 let text = concat!(
2616 "let one = ;\n", //
2617 "let two = \n",
2618 "let three = 3;\n",
2619 );
2620
2621 let fs = FakeFs::new(cx.executor());
2622 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2623
2624 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2625 let buffer = project
2626 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2627 .await
2628 .unwrap();
2629
2630 project.update(cx, |project, cx| {
2631 project.lsp_store.update(cx, |lsp_store, cx| {
2632 lsp_store
2633 .update_diagnostic_entries(
2634 LanguageServerId(0),
2635 PathBuf::from("/dir/a.rs"),
2636 None,
2637 None,
2638 vec![
2639 DiagnosticEntry {
2640 range: Unclipped(PointUtf16::new(0, 10))
2641 ..Unclipped(PointUtf16::new(0, 10)),
2642 diagnostic: Diagnostic {
2643 severity: DiagnosticSeverity::ERROR,
2644 message: "syntax error 1".to_string(),
2645 source_kind: DiagnosticSourceKind::Pushed,
2646 ..Diagnostic::default()
2647 },
2648 },
2649 DiagnosticEntry {
2650 range: Unclipped(PointUtf16::new(1, 10))
2651 ..Unclipped(PointUtf16::new(1, 10)),
2652 diagnostic: Diagnostic {
2653 severity: DiagnosticSeverity::ERROR,
2654 message: "syntax error 2".to_string(),
2655 source_kind: DiagnosticSourceKind::Pushed,
2656 ..Diagnostic::default()
2657 },
2658 },
2659 ],
2660 cx,
2661 )
2662 .unwrap();
2663 })
2664 });
2665
2666 // An empty range is extended forward to include the following character.
2667 // At the end of a line, an empty range is extended backward to include
2668 // the preceding character.
2669 buffer.update(cx, |buffer, _| {
2670 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2671 assert_eq!(
2672 chunks
2673 .iter()
2674 .map(|(s, d)| (s.as_str(), *d))
2675 .collect::<Vec<_>>(),
2676 &[
2677 ("let one = ", None),
2678 (";", Some(DiagnosticSeverity::ERROR)),
2679 ("\nlet two =", None),
2680 (" ", Some(DiagnosticSeverity::ERROR)),
2681 ("\nlet three = 3;\n", None)
2682 ]
2683 );
2684 });
2685}
2686
2687#[gpui::test]
2688async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2689 init_test(cx);
2690
2691 let fs = FakeFs::new(cx.executor());
2692 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2693 .await;
2694
2695 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2696 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2697
2698 lsp_store.update(cx, |lsp_store, cx| {
2699 lsp_store
2700 .update_diagnostic_entries(
2701 LanguageServerId(0),
2702 Path::new("/dir/a.rs").to_owned(),
2703 None,
2704 None,
2705 vec![DiagnosticEntry {
2706 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2707 diagnostic: Diagnostic {
2708 severity: DiagnosticSeverity::ERROR,
2709 is_primary: true,
2710 message: "syntax error a1".to_string(),
2711 source_kind: DiagnosticSourceKind::Pushed,
2712 ..Diagnostic::default()
2713 },
2714 }],
2715 cx,
2716 )
2717 .unwrap();
2718 lsp_store
2719 .update_diagnostic_entries(
2720 LanguageServerId(1),
2721 Path::new("/dir/a.rs").to_owned(),
2722 None,
2723 None,
2724 vec![DiagnosticEntry {
2725 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2726 diagnostic: Diagnostic {
2727 severity: DiagnosticSeverity::ERROR,
2728 is_primary: true,
2729 message: "syntax error b1".to_string(),
2730 source_kind: DiagnosticSourceKind::Pushed,
2731 ..Diagnostic::default()
2732 },
2733 }],
2734 cx,
2735 )
2736 .unwrap();
2737
2738 assert_eq!(
2739 lsp_store.diagnostic_summary(false, cx),
2740 DiagnosticSummary {
2741 error_count: 2,
2742 warning_count: 0,
2743 }
2744 );
2745 });
2746}
2747
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // When the language server computes edits against an older document
    // version, `edits_from_lsp` must translate them through the buffer edits
    // made since that version, so applying them still lands on the text the
    // server intended to change.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server will compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below refer to the ORIGINAL text (the captured
    // version), not the buffer's current contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved comments
    // that were inserted after the server's snapshot was taken.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2902
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // A language server may express a tiny change as a huge delete-and-
    // reinsert diff. `edits_from_lsp` should minimize such a diff down to the
    // actual changes (asserted below as exactly two small edits).
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff above collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3013
3014#[gpui::test]
3015async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3016 cx: &mut gpui::TestAppContext,
3017) {
3018 init_test(cx);
3019
3020 let text = "Path()";
3021
3022 let fs = FakeFs::new(cx.executor());
3023 fs.insert_tree(
3024 path!("/dir"),
3025 json!({
3026 "a.rs": text
3027 }),
3028 )
3029 .await;
3030
3031 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3032 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3033 let buffer = project
3034 .update(cx, |project, cx| {
3035 project.open_local_buffer(path!("/dir/a.rs"), cx)
3036 })
3037 .await
3038 .unwrap();
3039
3040 // Simulate the language server sending us a pair of edits at the same location,
3041 // with an insertion following a replacement (which violates the LSP spec).
3042 let edits = lsp_store
3043 .update(cx, |lsp_store, cx| {
3044 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3045 &buffer,
3046 [
3047 lsp::TextEdit {
3048 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3049 new_text: "Path".into(),
3050 },
3051 lsp::TextEdit {
3052 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3053 new_text: "from path import Path\n\n\n".into(),
3054 },
3055 ],
3056 LanguageServerId(0),
3057 None,
3058 cx,
3059 )
3060 })
3061 .await
3062 .unwrap();
3063
3064 buffer.update(cx, |buffer, cx| {
3065 buffer.edit(edits, None, cx);
3066 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3067 });
3068}
3069
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Malformed server edits — unordered, with inverted ranges and ranges
    // past the end of the document — must be normalized by `edits_from_lsp`
    // into valid, minimal, in-order buffer edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 > end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far beyond the document's end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization only two minimal, ordered edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3176
3177fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3178 buffer: &Buffer,
3179 range: Range<T>,
3180) -> Vec<(String, Option<DiagnosticSeverity>)> {
3181 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3182 for chunk in buffer.snapshot().chunks(range, true) {
3183 if chunks
3184 .last()
3185 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3186 {
3187 chunks.last_mut().unwrap().0.push_str(chunk.text);
3188 } else {
3189 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3190 }
3191 }
3192 chunks
3193}
3194
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition into a file outside the project should open the target
    // in an invisible worktree, and that worktree should be released once the
    // last reference to the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: only b.rs is part of the project; a.rs is outside of it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs,
    // which is NOT part of the project's worktrees.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held in an invisible
        // (`false`) worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3293
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // When a completion item supplies `text_edit`, that edit's range and new
    // text take precedence over both `insert_text` and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request; it is resolved below once the fake
    // server's handler has replied.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an item whose text_edit replaces the trailing "fqn"
    // (the last 3 characters) with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion mirrors the server's text_edit, not the
    // insert_text or label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3376
// Regression coverage for completion lists whose items rely on the
// list-level `CompletionListItemDefaults::edit_range` (LSP 3.17) instead of
// carrying a per-item `text_edit`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake language server advertising completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request before installing the handler; the
        // handler's `.next().await` below resolves once it has served it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the last three characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is used as the new text, applied over the default
        // edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no edit text, the label is used as the new text over the
        // default edit range; the item's `insert_text` is ignored here.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3513
// Covers completion responses that carry no edit range at all — neither
// per-item `text_edit`s nor a list-level default edit range.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The request is started before the handler is installed; the handler's
    // `.next().await` below resolves once the request has been served.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label, and the replacement covers the
    // trailing three characters ("fqn") preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Completion is requested just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With nothing else to go on, the label itself is the new text, replacing
    // the "cmp" immediately before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3619
3620#[gpui::test]
3621async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3622 init_test(cx);
3623
3624 let fs = FakeFs::new(cx.executor());
3625 fs.insert_tree(
3626 path!("/dir"),
3627 json!({
3628 "a.ts": "",
3629 }),
3630 )
3631 .await;
3632
3633 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3634
3635 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3636 language_registry.add(typescript_lang());
3637 let mut fake_language_servers = language_registry.register_fake_lsp(
3638 "TypeScript",
3639 FakeLspAdapter {
3640 capabilities: lsp::ServerCapabilities {
3641 completion_provider: Some(lsp::CompletionOptions {
3642 trigger_characters: Some(vec![":".to_string()]),
3643 ..Default::default()
3644 }),
3645 ..Default::default()
3646 },
3647 ..Default::default()
3648 },
3649 );
3650
3651 let (buffer, _handle) = project
3652 .update(cx, |p, cx| {
3653 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3654 })
3655 .await
3656 .unwrap();
3657
3658 let fake_server = fake_language_servers.next().await.unwrap();
3659
3660 let text = "let a = b.fqn";
3661 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3662 let completions = project.update(cx, |project, cx| {
3663 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3664 });
3665
3666 fake_server
3667 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3668 Ok(Some(lsp::CompletionResponse::Array(vec![
3669 lsp::CompletionItem {
3670 label: "fullyQualifiedName?".into(),
3671 insert_text: Some("fully\rQualified\r\nName".into()),
3672 ..Default::default()
3673 },
3674 ])))
3675 })
3676 .next()
3677 .await;
3678 let completions = completions
3679 .await
3680 .unwrap()
3681 .into_iter()
3682 .flat_map(|response| response.completions)
3683 .collect::<Vec<_>>();
3684 assert_eq!(completions.len(), 1);
3685 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3686}
3687
// A code action may carry no edits at all: resolving it yields a command,
// executing that command makes the server send `workspace/applyEdit` back to
// the client, and those edits must surface in the returned transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server supports lazily-resolved code actions and the single
    // command "_the/command".
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying resolve `data`.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the client to prepend "X" to a.ts via applyEdit.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3829
3830#[gpui::test]
3831async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3832 init_test(cx);
3833 let fs = FakeFs::new(cx.background_executor.clone());
3834 let expected_contents = "content";
3835 fs.as_fake()
3836 .insert_tree(
3837 "/root",
3838 json!({
3839 "test.txt": expected_contents
3840 }),
3841 )
3842 .await;
3843
3844 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3845
3846 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3847 let worktree = project.worktrees(cx).next().unwrap();
3848 let entry_id = worktree
3849 .read(cx)
3850 .entry_for_path(rel_path("test.txt"))
3851 .unwrap()
3852 .id;
3853 (worktree, entry_id)
3854 });
3855 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3856 let _result = project
3857 .update(cx, |project, cx| {
3858 project.rename_entry(
3859 entry_id,
3860 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3861 cx,
3862 )
3863 })
3864 .await
3865 .unwrap();
3866 worktree.read_with(cx, |worktree, _| {
3867 assert!(
3868 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3869 "Old file should have been removed"
3870 );
3871 assert!(
3872 worktree
3873 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3874 .is_some(),
3875 "Whole directory hierarchy and the new file should have been created"
3876 );
3877 });
3878 assert_eq!(
3879 worktree
3880 .update(cx, |worktree, cx| {
3881 worktree.load_file(
3882 rel_path("dir1/dir2/dir3/test.txt"),
3883 &Default::default(),
3884 None,
3885 cx,
3886 )
3887 })
3888 .await
3889 .unwrap()
3890 .text,
3891 expected_contents,
3892 "Moved file's contents should be preserved"
3893 );
3894
3895 let entry_id = worktree.read_with(cx, |worktree, _| {
3896 worktree
3897 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3898 .unwrap()
3899 .id
3900 });
3901
3902 let _result = project
3903 .update(cx, |project, cx| {
3904 project.rename_entry(
3905 entry_id,
3906 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
3907 cx,
3908 )
3909 })
3910 .await
3911 .unwrap();
3912 worktree.read_with(cx, |worktree, _| {
3913 assert!(
3914 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3915 "First file should not reappear"
3916 );
3917 assert!(
3918 worktree
3919 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3920 .is_none(),
3921 "Old file should have been removed"
3922 );
3923 assert!(
3924 worktree
3925 .entry_for_path(rel_path("dir1/dir2/test.txt"))
3926 .is_some(),
3927 "No error should have occurred after moving into existing directory"
3928 );
3929 });
3930 assert_eq!(
3931 worktree
3932 .update(cx, |worktree, cx| {
3933 worktree.load_file(
3934 rel_path("dir1/dir2/test.txt"),
3935 &Default::default(),
3936 None,
3937 cx,
3938 )
3939 })
3940 .await
3941 .unwrap()
3942 .text,
3943 expected_contents,
3944 "Moved file's contents should be preserved"
3945 );
3946}
3947
3948#[gpui::test(iterations = 10)]
3949async fn test_save_file(cx: &mut gpui::TestAppContext) {
3950 init_test(cx);
3951
3952 let fs = FakeFs::new(cx.executor());
3953 fs.insert_tree(
3954 path!("/dir"),
3955 json!({
3956 "file1": "the old contents",
3957 }),
3958 )
3959 .await;
3960
3961 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3962 let buffer = project
3963 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3964 .await
3965 .unwrap();
3966 buffer.update(cx, |buffer, cx| {
3967 assert_eq!(buffer.text(), "the old contents");
3968 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3969 });
3970
3971 project
3972 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3973 .await
3974 .unwrap();
3975
3976 let new_text = fs
3977 .load(Path::new(path!("/dir/file1")))
3978 .await
3979 .unwrap()
3980 .replace("\r\n", "\n");
3981 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3982}
3983
// Saving an untitled buffer under a name with a recognized extension should
// start the matching language server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled (non-file-backed) buffer; no server should attach yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is covered by the newly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4063
4064#[gpui::test(iterations = 30)]
4065async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4066 init_test(cx);
4067
4068 let fs = FakeFs::new(cx.executor());
4069 fs.insert_tree(
4070 path!("/dir"),
4071 json!({
4072 "file1": "the original contents",
4073 }),
4074 )
4075 .await;
4076
4077 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4078 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4079 let buffer = project
4080 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4081 .await
4082 .unwrap();
4083
4084 // Simulate buffer diffs being slow, so that they don't complete before
4085 // the next file change occurs.
4086 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4087
4088 let encoding = Encoding::default();
4089
4090 // Change the buffer's file on disk, and then wait for the file change
4091 // to be detected by the worktree, so that the buffer starts reloading.
4092 fs.save(
4093 path!("/dir/file1").as_ref(),
4094 &Rope::from_str("the first contents", cx.background_executor()),
4095 Default::default(),
4096 encoding.clone(),
4097 )
4098 .await
4099 .unwrap();
4100 worktree.next_event(cx).await;
4101
4102 // Change the buffer's file again. Depending on the random seed, the
4103 // previous file change may still be in progress.
4104 fs.save(
4105 path!("/dir/file1").as_ref(),
4106 &Rope::from_str("the second contents", cx.background_executor()),
4107 Default::default(),
4108 encoding,
4109 )
4110 .await
4111 .unwrap();
4112 worktree.next_event(cx).await;
4113
4114 cx.executor().run_until_parked();
4115 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4116 buffer.read_with(cx, |buffer, _| {
4117 assert_eq!(buffer.text(), on_disk_text);
4118 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4119 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4120 });
4121}
4122
4123#[gpui::test(iterations = 30)]
4124async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4125 init_test(cx);
4126
4127 let fs = FakeFs::new(cx.executor());
4128 fs.insert_tree(
4129 path!("/dir"),
4130 json!({
4131 "file1": "the original contents",
4132 }),
4133 )
4134 .await;
4135
4136 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4137 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4138 let buffer = project
4139 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4140 .await
4141 .unwrap();
4142
4143 // Simulate buffer diffs being slow, so that they don't complete before
4144 // the next file change occurs.
4145 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4146
4147 let encoding = Encoding::new(UTF_8);
4148
4149 // Change the buffer's file on disk, and then wait for the file change
4150 // to be detected by the worktree, so that the buffer starts reloading.
4151 fs.save(
4152 path!("/dir/file1").as_ref(),
4153 &Rope::from_str("the first contents", cx.background_executor()),
4154 Default::default(),
4155 encoding,
4156 )
4157 .await
4158 .unwrap();
4159 worktree.next_event(cx).await;
4160
4161 cx.executor()
4162 .spawn(cx.executor().simulate_random_delay())
4163 .await;
4164
4165 // Perform a noop edit, causing the buffer's version to increase.
4166 buffer.update(cx, |buffer, cx| {
4167 buffer.edit([(0..0, " ")], None, cx);
4168 buffer.undo(cx);
4169 });
4170
4171 cx.executor().run_until_parked();
4172 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4173 buffer.read_with(cx, |buffer, _| {
4174 let buffer_text = buffer.text();
4175 if buffer_text == on_disk_text {
4176 assert!(
4177 !buffer.is_dirty() && !buffer.has_conflict(),
4178 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4179 );
4180 }
4181 // If the file change occurred while the buffer was processing the first
4182 // change, the buffer will be in a conflicting state.
4183 else {
4184 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4185 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4186 }
4187 });
4188}
4189
4190#[gpui::test]
4191async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4192 init_test(cx);
4193
4194 let fs = FakeFs::new(cx.executor());
4195 fs.insert_tree(
4196 path!("/dir"),
4197 json!({
4198 "file1": "the old contents",
4199 }),
4200 )
4201 .await;
4202
4203 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4204 let buffer = project
4205 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4206 .await
4207 .unwrap();
4208 buffer.update(cx, |buffer, cx| {
4209 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4210 });
4211
4212 project
4213 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4214 .await
4215 .unwrap();
4216
4217 let new_text = fs
4218 .load(Path::new(path!("/dir/file1")))
4219 .await
4220 .unwrap()
4221 .replace("\r\n", "\n");
4222 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4223}
4224
4225#[gpui::test]
4226async fn test_save_as(cx: &mut gpui::TestAppContext) {
4227 init_test(cx);
4228
4229 let fs = FakeFs::new(cx.executor());
4230 fs.insert_tree("/dir", json!({})).await;
4231
4232 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4233
4234 let languages = project.update(cx, |project, _| project.languages().clone());
4235 languages.add(rust_lang());
4236
4237 let buffer = project.update(cx, |project, cx| {
4238 project.create_local_buffer("", None, false, cx)
4239 });
4240 buffer.update(cx, |buffer, cx| {
4241 buffer.edit([(0..0, "abc")], None, cx);
4242 assert!(buffer.is_dirty());
4243 assert!(!buffer.has_conflict());
4244 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4245 });
4246 project
4247 .update(cx, |project, cx| {
4248 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4249 let path = ProjectPath {
4250 worktree_id,
4251 path: rel_path("file1.rs").into(),
4252 };
4253 project.save_buffer_as(buffer.clone(), path, cx)
4254 })
4255 .await
4256 .unwrap();
4257 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4258
4259 cx.executor().run_until_parked();
4260 buffer.update(cx, |buffer, cx| {
4261 assert_eq!(
4262 buffer.file().unwrap().full_path(cx),
4263 Path::new("dir/file1.rs")
4264 );
4265 assert!(!buffer.is_dirty());
4266 assert!(!buffer.has_conflict());
4267 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4268 });
4269
4270 let opened_buffer = project
4271 .update(cx, |project, cx| {
4272 project.open_local_buffer("/dir/file1.rs", cx)
4273 })
4274 .await
4275 .unwrap();
4276 assert_eq!(opened_buffer, buffer);
4277}
4278
4279#[gpui::test]
4280async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4281 init_test(cx);
4282
4283 let fs = FakeFs::new(cx.executor());
4284 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4285
4286 fs.insert_tree(
4287 path!("/dir"),
4288 json!({
4289 "data_a.txt": "data about a"
4290 }),
4291 )
4292 .await;
4293
4294 let buffer = project
4295 .update(cx, |project, cx| {
4296 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4297 })
4298 .await
4299 .unwrap();
4300
4301 buffer.update(cx, |buffer, cx| {
4302 buffer.edit([(11..12, "b")], None, cx);
4303 });
4304
4305 // Save buffer's contents as a new file and confirm that the buffer's now
4306 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4307 // file associated with the buffer has now been updated to `data_b.txt`
4308 project
4309 .update(cx, |project, cx| {
4310 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4311 let new_path = ProjectPath {
4312 worktree_id,
4313 path: rel_path("data_b.txt").into(),
4314 };
4315
4316 project.save_buffer_as(buffer.clone(), new_path, cx)
4317 })
4318 .await
4319 .unwrap();
4320
4321 buffer.update(cx, |buffer, cx| {
4322 assert_eq!(
4323 buffer.file().unwrap().full_path(cx),
4324 Path::new("dir/data_b.txt")
4325 )
4326 });
4327
4328 // Open the original `data_a.txt` file, confirming that its contents are
4329 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4330 let original_buffer = project
4331 .update(cx, |project, cx| {
4332 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4333 })
4334 .await
4335 .unwrap();
4336
4337 original_buffer.update(cx, |buffer, cx| {
4338 assert_eq!(buffer.text(), "data about a");
4339 assert_eq!(
4340 buffer.file().unwrap().full_path(cx),
4341 Path::new("dir/data_a.txt")
4342 )
4343 });
4344}
4345
// Exercises a real (non-fake) filesystem: renames/deletions on disk must be
// picked up by the local worktree, keep open buffers pointing at the right
// entries, and replicate to a remote copy of the worktree via its update
// stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // RealFs work happens on OS threads; allow the test executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so we can replay them
    // into the remote worktree below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids remain stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths…
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // …while a deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4513
4514#[gpui::test(iterations = 10)]
4515async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4516 init_test(cx);
4517
4518 let fs = FakeFs::new(cx.executor());
4519 fs.insert_tree(
4520 path!("/dir"),
4521 json!({
4522 "a": {
4523 "file1": "",
4524 }
4525 }),
4526 )
4527 .await;
4528
4529 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4530 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4531 let tree_id = tree.update(cx, |tree, _| tree.id());
4532
4533 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4534 project.update(cx, |project, cx| {
4535 let tree = project.worktrees(cx).next().unwrap();
4536 tree.read(cx)
4537 .entry_for_path(rel_path(path))
4538 .unwrap_or_else(|| panic!("no entry for path {}", path))
4539 .id
4540 })
4541 };
4542
4543 let dir_id = id_for_path("a", cx);
4544 let file_id = id_for_path("a/file1", cx);
4545 let buffer = project
4546 .update(cx, |p, cx| {
4547 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4548 })
4549 .await
4550 .unwrap();
4551 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4552
4553 project
4554 .update(cx, |project, cx| {
4555 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4556 })
4557 .unwrap()
4558 .await
4559 .into_included()
4560 .unwrap();
4561 cx.executor().run_until_parked();
4562
4563 assert_eq!(id_for_path("b", cx), dir_id);
4564 assert_eq!(id_for_path("b/file1", cx), file_id);
4565 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4566}
4567
4568#[gpui::test]
4569async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4570 init_test(cx);
4571
4572 let fs = FakeFs::new(cx.executor());
4573 fs.insert_tree(
4574 "/dir",
4575 json!({
4576 "a.txt": "a-contents",
4577 "b.txt": "b-contents",
4578 }),
4579 )
4580 .await;
4581
4582 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4583
4584 // Spawn multiple tasks to open paths, repeating some paths.
4585 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4586 (
4587 p.open_local_buffer("/dir/a.txt", cx),
4588 p.open_local_buffer("/dir/b.txt", cx),
4589 p.open_local_buffer("/dir/a.txt", cx),
4590 )
4591 });
4592
4593 let buffer_a_1 = buffer_a_1.await.unwrap();
4594 let buffer_a_2 = buffer_a_2.await.unwrap();
4595 let buffer_b = buffer_b.await.unwrap();
4596 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4597 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4598
4599 // There is only one buffer per path.
4600 let buffer_a_id = buffer_a_1.entity_id();
4601 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4602
4603 // Open the same path again while it is still open.
4604 drop(buffer_a_1);
4605 let buffer_a_3 = project
4606 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4607 .await
4608 .unwrap();
4609
4610 // There's still only one buffer per path.
4611 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4612}
4613
4614#[gpui::test]
4615async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4616 init_test(cx);
4617
4618 let fs = FakeFs::new(cx.executor());
4619 fs.insert_tree(
4620 path!("/dir"),
4621 json!({
4622 "file1": "abc",
4623 "file2": "def",
4624 "file3": "ghi",
4625 }),
4626 )
4627 .await;
4628
4629 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4630
4631 let buffer1 = project
4632 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4633 .await
4634 .unwrap();
4635 let events = Arc::new(Mutex::new(Vec::new()));
4636
4637 // initially, the buffer isn't dirty.
4638 buffer1.update(cx, |buffer, cx| {
4639 cx.subscribe(&buffer1, {
4640 let events = events.clone();
4641 move |_, _, event, _| match event {
4642 BufferEvent::Operation { .. } => {}
4643 _ => events.lock().push(event.clone()),
4644 }
4645 })
4646 .detach();
4647
4648 assert!(!buffer.is_dirty());
4649 assert!(events.lock().is_empty());
4650
4651 buffer.edit([(1..2, "")], None, cx);
4652 });
4653
4654 // after the first edit, the buffer is dirty, and emits a dirtied event.
4655 buffer1.update(cx, |buffer, cx| {
4656 assert!(buffer.text() == "ac");
4657 assert!(buffer.is_dirty());
4658 assert_eq!(
4659 *events.lock(),
4660 &[
4661 language::BufferEvent::Edited,
4662 language::BufferEvent::DirtyChanged
4663 ]
4664 );
4665 events.lock().clear();
4666 buffer.did_save(
4667 buffer.version(),
4668 buffer.file().unwrap().disk_state().mtime(),
4669 cx,
4670 );
4671 });
4672
4673 // after saving, the buffer is not dirty, and emits a saved event.
4674 buffer1.update(cx, |buffer, cx| {
4675 assert!(!buffer.is_dirty());
4676 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4677 events.lock().clear();
4678
4679 buffer.edit([(1..1, "B")], None, cx);
4680 buffer.edit([(2..2, "D")], None, cx);
4681 });
4682
4683 // after editing again, the buffer is dirty, and emits another dirty event.
4684 buffer1.update(cx, |buffer, cx| {
4685 assert!(buffer.text() == "aBDc");
4686 assert!(buffer.is_dirty());
4687 assert_eq!(
4688 *events.lock(),
4689 &[
4690 language::BufferEvent::Edited,
4691 language::BufferEvent::DirtyChanged,
4692 language::BufferEvent::Edited,
4693 ],
4694 );
4695 events.lock().clear();
4696
4697 // After restoring the buffer to its previously-saved state,
4698 // the buffer is not considered dirty anymore.
4699 buffer.edit([(1..3, "")], None, cx);
4700 assert!(buffer.text() == "ac");
4701 assert!(!buffer.is_dirty());
4702 });
4703
4704 assert_eq!(
4705 *events.lock(),
4706 &[
4707 language::BufferEvent::Edited,
4708 language::BufferEvent::DirtyChanged
4709 ]
4710 );
4711
4712 // When a file is deleted, it is not considered dirty.
4713 let events = Arc::new(Mutex::new(Vec::new()));
4714 let buffer2 = project
4715 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4716 .await
4717 .unwrap();
4718 buffer2.update(cx, |_, cx| {
4719 cx.subscribe(&buffer2, {
4720 let events = events.clone();
4721 move |_, _, event, _| match event {
4722 BufferEvent::Operation { .. } => {}
4723 _ => events.lock().push(event.clone()),
4724 }
4725 })
4726 .detach();
4727 });
4728
4729 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4730 .await
4731 .unwrap();
4732 cx.executor().run_until_parked();
4733 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4734 assert_eq!(
4735 mem::take(&mut *events.lock()),
4736 &[language::BufferEvent::FileHandleChanged]
4737 );
4738
4739 // Buffer becomes dirty when edited.
4740 buffer2.update(cx, |buffer, cx| {
4741 buffer.edit([(2..3, "")], None, cx);
4742 assert_eq!(buffer.is_dirty(), true);
4743 });
4744 assert_eq!(
4745 mem::take(&mut *events.lock()),
4746 &[
4747 language::BufferEvent::Edited,
4748 language::BufferEvent::DirtyChanged
4749 ]
4750 );
4751
4752 // Buffer becomes clean again when all of its content is removed, because
4753 // the file was deleted.
4754 buffer2.update(cx, |buffer, cx| {
4755 buffer.edit([(0..2, "")], None, cx);
4756 assert_eq!(buffer.is_empty(), true);
4757 assert_eq!(buffer.is_dirty(), false);
4758 });
4759 assert_eq!(
4760 *events.lock(),
4761 &[
4762 language::BufferEvent::Edited,
4763 language::BufferEvent::DirtyChanged
4764 ]
4765 );
4766
4767 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4768 let events = Arc::new(Mutex::new(Vec::new()));
4769 let buffer3 = project
4770 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4771 .await
4772 .unwrap();
4773 buffer3.update(cx, |_, cx| {
4774 cx.subscribe(&buffer3, {
4775 let events = events.clone();
4776 move |_, _, event, _| match event {
4777 BufferEvent::Operation { .. } => {}
4778 _ => events.lock().push(event.clone()),
4779 }
4780 })
4781 .detach();
4782 });
4783
4784 buffer3.update(cx, |buffer, cx| {
4785 buffer.edit([(0..0, "x")], None, cx);
4786 });
4787 events.lock().clear();
4788 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4789 .await
4790 .unwrap();
4791 cx.executor().run_until_parked();
4792 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4793 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4794}
4795
4796#[gpui::test]
4797async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4798 init_test(cx);
4799
4800 let (initial_contents, initial_offsets) =
4801 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
4802 let fs = FakeFs::new(cx.executor());
4803 fs.insert_tree(
4804 path!("/dir"),
4805 json!({
4806 "the-file": initial_contents,
4807 }),
4808 )
4809 .await;
4810 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4811 let buffer = project
4812 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
4813 .await
4814 .unwrap();
4815
4816 let anchors = initial_offsets
4817 .iter()
4818 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
4819 .collect::<Vec<_>>();
4820
4821 // Change the file on disk, adding two new lines of text, and removing
4822 // one line.
4823 buffer.update(cx, |buffer, _| {
4824 assert!(!buffer.is_dirty());
4825 assert!(!buffer.has_conflict());
4826 });
4827
4828 let (new_contents, new_offsets) =
4829 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
4830
4831 let encoding = Encoding::new(UTF_8);
4832
4833 fs.save(
4834 path!("/dir/the-file").as_ref(),
4835 &Rope::from_str(new_contents.as_str(), cx.background_executor()),
4836 LineEnding::Unix,
4837 encoding,
4838 )
4839 .await
4840 .unwrap();
4841
4842 // Because the buffer was not modified, it is reloaded from disk. Its
4843 // contents are edited according to the diff between the old and new
4844 // file contents.
4845 cx.executor().run_until_parked();
4846 buffer.update(cx, |buffer, _| {
4847 assert_eq!(buffer.text(), new_contents);
4848 assert!(!buffer.is_dirty());
4849 assert!(!buffer.has_conflict());
4850
4851 let anchor_offsets = anchors
4852 .iter()
4853 .map(|anchor| anchor.to_offset(&*buffer))
4854 .collect::<Vec<_>>();
4855 assert_eq!(anchor_offsets, new_offsets);
4856 });
4857
4858 // Modify the buffer
4859 buffer.update(cx, |buffer, cx| {
4860 buffer.edit([(0..0, " ")], None, cx);
4861 assert!(buffer.is_dirty());
4862 assert!(!buffer.has_conflict());
4863 });
4864
4865 let encoding = Encoding::new(UTF_8);
4866
4867 // Change the file on disk again, adding blank lines to the beginning.
4868 fs.save(
4869 path!("/dir/the-file").as_ref(),
4870 &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
4871 LineEnding::Unix,
4872 encoding,
4873 )
4874 .await
4875 .unwrap();
4876
4877 // Because the buffer is modified, it doesn't reload from disk, but is
4878 // marked as having a conflict.
4879 cx.executor().run_until_parked();
4880 buffer.update(cx, |buffer, _| {
4881 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
4882 assert!(buffer.has_conflict());
4883 });
4884}
4885
4886#[gpui::test]
4887async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4888 init_test(cx);
4889
4890 let fs = FakeFs::new(cx.executor());
4891 fs.insert_tree(
4892 path!("/dir"),
4893 json!({
4894 "file1": "a\nb\nc\n",
4895 "file2": "one\r\ntwo\r\nthree\r\n",
4896 }),
4897 )
4898 .await;
4899
4900 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4901 let buffer1 = project
4902 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4903 .await
4904 .unwrap();
4905 let buffer2 = project
4906 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4907 .await
4908 .unwrap();
4909
4910 buffer1.update(cx, |buffer, _| {
4911 assert_eq!(buffer.text(), "a\nb\nc\n");
4912 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4913 });
4914 buffer2.update(cx, |buffer, _| {
4915 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4916 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4917 });
4918
4919 let encoding = Encoding::new(UTF_8);
4920
4921 // Change a file's line endings on disk from unix to windows. The buffer's
4922 // state updates correctly.
4923 fs.save(
4924 path!("/dir/file1").as_ref(),
4925 &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
4926 LineEnding::Windows,
4927 encoding,
4928 )
4929 .await
4930 .unwrap();
4931 cx.executor().run_until_parked();
4932 buffer1.update(cx, |buffer, _| {
4933 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4934 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4935 });
4936
4937 // Save a file with windows line endings. The file is written correctly.
4938 buffer2.update(cx, |buffer, cx| {
4939 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4940 });
4941 project
4942 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4943 .await
4944 .unwrap();
4945 assert_eq!(
4946 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4947 "one\r\ntwo\r\nthree\r\nfour\r\n",
4948 );
4949}
4950
/// Verifies that push diagnostics connected via `related_information` are
/// grouped: hints receive the group id of their primary diagnostic, and
/// `diagnostic_group` returns every entry belonging to one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a push-diagnostics message containing two logical groups:
    // "error 1" (a warning with one hint) and "error 2" (an error with two
    // hints). Group membership is expressed through `related_information`
    // links in both directions (primary -> hints, hint -> "original
    // diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as if pushed by language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. Each hint carries the group id
    // of the primary diagnostic it is related to ("error 2" was assigned
    // group 0, "error 1" group 1).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5210
/// Verifies that renaming a file through the project sends the LSP
/// `workspace/willRenameFiles` request (and applies the edit the server
/// returns) followed by the `workspace/didRenameFiles` notification,
/// according to the file-operation filters the server registered.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters: the server wants rename notifications for
    // "*.rs" files and for any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both willRename and didRename support with
    // the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off a rename of one.rs -> three.rs; the future completes only
    // after the LSP round-trips below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; it should be
    // applied by the project as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit at the moment the willRenameFiles handler runs, so
    // we can assert later that the request actually arrived.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5346
5347#[gpui::test]
5348async fn test_rename(cx: &mut gpui::TestAppContext) {
5349 // hi
5350 init_test(cx);
5351
5352 let fs = FakeFs::new(cx.executor());
5353 fs.insert_tree(
5354 path!("/dir"),
5355 json!({
5356 "one.rs": "const ONE: usize = 1;",
5357 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
5358 }),
5359 )
5360 .await;
5361
5362 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5363
5364 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5365 language_registry.add(rust_lang());
5366 let mut fake_servers = language_registry.register_fake_lsp(
5367 "Rust",
5368 FakeLspAdapter {
5369 capabilities: lsp::ServerCapabilities {
5370 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
5371 prepare_provider: Some(true),
5372 work_done_progress_options: Default::default(),
5373 })),
5374 ..Default::default()
5375 },
5376 ..Default::default()
5377 },
5378 );
5379
5380 let (buffer, _handle) = project
5381 .update(cx, |project, cx| {
5382 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
5383 })
5384 .await
5385 .unwrap();
5386
5387 let fake_server = fake_servers.next().await.unwrap();
5388
5389 let response = project.update(cx, |project, cx| {
5390 project.prepare_rename(buffer.clone(), 7, cx)
5391 });
5392 fake_server
5393 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
5394 assert_eq!(
5395 params.text_document.uri.as_str(),
5396 uri!("file:///dir/one.rs")
5397 );
5398 assert_eq!(params.position, lsp::Position::new(0, 7));
5399 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5400 lsp::Position::new(0, 6),
5401 lsp::Position::new(0, 9),
5402 ))))
5403 })
5404 .next()
5405 .await
5406 .unwrap();
5407 let response = response.await.unwrap();
5408 let PrepareRenameResponse::Success(range) = response else {
5409 panic!("{:?}", response);
5410 };
5411 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
5412 assert_eq!(range, 6..9);
5413
5414 let response = project.update(cx, |project, cx| {
5415 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
5416 });
5417 fake_server
5418 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
5419 assert_eq!(
5420 params.text_document_position.text_document.uri.as_str(),
5421 uri!("file:///dir/one.rs")
5422 );
5423 assert_eq!(
5424 params.text_document_position.position,
5425 lsp::Position::new(0, 7)
5426 );
5427 assert_eq!(params.new_name, "THREE");
5428 Ok(Some(lsp::WorkspaceEdit {
5429 changes: Some(
5430 [
5431 (
5432 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
5433 vec![lsp::TextEdit::new(
5434 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
5435 "THREE".to_string(),
5436 )],
5437 ),
5438 (
5439 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
5440 vec![
5441 lsp::TextEdit::new(
5442 lsp::Range::new(
5443 lsp::Position::new(0, 24),
5444 lsp::Position::new(0, 27),
5445 ),
5446 "THREE".to_string(),
5447 ),
5448 lsp::TextEdit::new(
5449 lsp::Range::new(
5450 lsp::Position::new(0, 35),
5451 lsp::Position::new(0, 38),
5452 ),
5453 "THREE".to_string(),
5454 ),
5455 ],
5456 ),
5457 ]
5458 .into_iter()
5459 .collect(),
5460 ),
5461 ..Default::default()
5462 }))
5463 })
5464 .next()
5465 .await
5466 .unwrap();
5467 let mut transaction = response.await.unwrap().0;
5468 assert_eq!(transaction.len(), 2);
5469 assert_eq!(
5470 transaction
5471 .remove_entry(&buffer)
5472 .unwrap()
5473 .0
5474 .update(cx, |buffer, _| buffer.text()),
5475 "const THREE: usize = 1;"
5476 );
5477 assert_eq!(
5478 transaction
5479 .into_keys()
5480 .next()
5481 .unwrap()
5482 .update(cx, |buffer, _| buffer.text()),
5483 "const TWO: usize = one::THREE + one::THREE;"
5484 );
5485}
5486
5487#[gpui::test]
5488async fn test_search(cx: &mut gpui::TestAppContext) {
5489 init_test(cx);
5490
5491 let fs = FakeFs::new(cx.executor());
5492 fs.insert_tree(
5493 path!("/dir"),
5494 json!({
5495 "one.rs": "const ONE: usize = 1;",
5496 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5497 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5498 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5499 }),
5500 )
5501 .await;
5502 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5503 assert_eq!(
5504 search(
5505 &project,
5506 SearchQuery::text(
5507 "TWO",
5508 false,
5509 true,
5510 false,
5511 Default::default(),
5512 Default::default(),
5513 false,
5514 None
5515 )
5516 .unwrap(),
5517 cx
5518 )
5519 .await
5520 .unwrap(),
5521 HashMap::from_iter([
5522 (path!("dir/two.rs").to_string(), vec![6..9]),
5523 (path!("dir/three.rs").to_string(), vec![37..40])
5524 ])
5525 );
5526
5527 let buffer_4 = project
5528 .update(cx, |project, cx| {
5529 project.open_local_buffer(path!("/dir/four.rs"), cx)
5530 })
5531 .await
5532 .unwrap();
5533 buffer_4.update(cx, |buffer, cx| {
5534 let text = "two::TWO";
5535 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5536 });
5537
5538 assert_eq!(
5539 search(
5540 &project,
5541 SearchQuery::text(
5542 "TWO",
5543 false,
5544 true,
5545 false,
5546 Default::default(),
5547 Default::default(),
5548 false,
5549 None,
5550 )
5551 .unwrap(),
5552 cx
5553 )
5554 .await
5555 .unwrap(),
5556 HashMap::from_iter([
5557 (path!("dir/two.rs").to_string(), vec![6..9]),
5558 (path!("dir/three.rs").to_string(), vec![37..40]),
5559 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5560 ])
5561 );
5562}
5563
/// Verifies that the `files_to_include` `PathMatcher` of a text search
/// restricts which files are searched: no matching inclusions yields no
/// results, and each matching glob admits exactly its files, regardless of
/// globs that match nothing.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob matches no file at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single glob restricting the search to Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A glob that matches nothing ("*.odd") alongside one that does ("*.ts").
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs combine their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5687
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that the `files_to_exclude` PathMatcher (6th argument to
    // `SearchQuery::text`) removes matching files from project-wide search
    // results.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files, each containing the word "file".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob matching nothing -> every file is still searched.
    // The ranges are byte offsets of "file" within each file's contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding `*.rs` leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching exclusion (`*.odd`) alongside a matching one is a no-op.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5811
5812#[gpui::test]
5813async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5814 init_test(cx);
5815
5816 let search_query = "file";
5817
5818 let fs = FakeFs::new(cx.executor());
5819 fs.insert_tree(
5820 path!("/dir"),
5821 json!({
5822 "one.rs": r#"// Rust file one"#,
5823 "one.ts": r#"// TypeScript file one"#,
5824 "two.rs": r#"// Rust file two"#,
5825 "two.ts": r#"// TypeScript file two"#,
5826 }),
5827 )
5828 .await;
5829
5830 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5831 let path_style = PathStyle::local();
5832 let _buffer = project.update(cx, |project, cx| {
5833 project.create_local_buffer("file", None, false, cx)
5834 });
5835
5836 assert_eq!(
5837 search(
5838 &project,
5839 SearchQuery::text(
5840 search_query,
5841 false,
5842 true,
5843 false,
5844 Default::default(),
5845 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5846 false,
5847 None,
5848 )
5849 .unwrap(),
5850 cx
5851 )
5852 .await
5853 .unwrap(),
5854 HashMap::from_iter([
5855 (path!("dir/one.rs").to_string(), vec![8..12]),
5856 (path!("dir/one.ts").to_string(), vec![14..18]),
5857 (path!("dir/two.rs").to_string(), vec![8..12]),
5858 (path!("dir/two.ts").to_string(), vec![14..18]),
5859 ]),
5860 "If no exclusions match, all files should be returned"
5861 );
5862
5863 assert_eq!(
5864 search(
5865 &project,
5866 SearchQuery::text(
5867 search_query,
5868 false,
5869 true,
5870 false,
5871 Default::default(),
5872 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5873 false,
5874 None,
5875 )
5876 .unwrap(),
5877 cx
5878 )
5879 .await
5880 .unwrap(),
5881 HashMap::from_iter([
5882 (path!("dir/one.ts").to_string(), vec![14..18]),
5883 (path!("dir/two.ts").to_string(), vec![14..18]),
5884 ]),
5885 "Rust exclusion search should give only TypeScript files"
5886 );
5887
5888 assert_eq!(
5889 search(
5890 &project,
5891 SearchQuery::text(
5892 search_query,
5893 false,
5894 true,
5895 false,
5896 Default::default(),
5897 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5898 false,
5899 None,
5900 )
5901 .unwrap(),
5902 cx
5903 )
5904 .await
5905 .unwrap(),
5906 HashMap::from_iter([
5907 (path!("dir/one.rs").to_string(), vec![8..12]),
5908 (path!("dir/two.rs").to_string(), vec![8..12]),
5909 ]),
5910 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5911 );
5912
5913 assert!(
5914 search(
5915 &project,
5916 SearchQuery::text(
5917 search_query,
5918 false,
5919 true,
5920 false,
5921 Default::default(),
5922 PathMatcher::new(
5923 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5924 PathStyle::local(),
5925 )
5926 .unwrap(),
5927 false,
5928 None,
5929 )
5930 .unwrap(),
5931 cx
5932 )
5933 .await
5934 .unwrap()
5935 .is_empty(),
5936 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5937 );
5938}
5939
5940#[gpui::test]
5941async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5942 init_test(cx);
5943
5944 let search_query = "file";
5945
5946 let fs = FakeFs::new(cx.executor());
5947 fs.insert_tree(
5948 path!("/dir"),
5949 json!({
5950 "one.rs": r#"// Rust file one"#,
5951 "one.ts": r#"// TypeScript file one"#,
5952 "two.rs": r#"// Rust file two"#,
5953 "two.ts": r#"// TypeScript file two"#,
5954 }),
5955 )
5956 .await;
5957 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5958 assert!(
5959 search(
5960 &project,
5961 SearchQuery::text(
5962 search_query,
5963 false,
5964 true,
5965 false,
5966 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5967 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5968 false,
5969 None,
5970 )
5971 .unwrap(),
5972 cx
5973 )
5974 .await
5975 .unwrap()
5976 .is_empty(),
5977 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5978 );
5979
5980 assert!(
5981 search(
5982 &project,
5983 SearchQuery::text(
5984 search_query,
5985 false,
5986 true,
5987 false,
5988 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5989 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5990 false,
5991 None,
5992 )
5993 .unwrap(),
5994 cx
5995 )
5996 .await
5997 .unwrap()
5998 .is_empty(),
5999 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6000 );
6001
6002 assert!(
6003 search(
6004 &project,
6005 SearchQuery::text(
6006 search_query,
6007 false,
6008 true,
6009 false,
6010 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6011 .unwrap(),
6012 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6013 .unwrap(),
6014 false,
6015 None,
6016 )
6017 .unwrap(),
6018 cx
6019 )
6020 .await
6021 .unwrap()
6022 .is_empty(),
6023 "Non-matching inclusions and exclusions should not change that."
6024 );
6025
6026 assert_eq!(
6027 search(
6028 &project,
6029 SearchQuery::text(
6030 search_query,
6031 false,
6032 true,
6033 false,
6034 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6035 .unwrap(),
6036 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6037 .unwrap(),
6038 false,
6039 None,
6040 )
6041 .unwrap(),
6042 cx
6043 )
6044 .await
6045 .unwrap(),
6046 HashMap::from_iter([
6047 (path!("dir/one.ts").to_string(), vec![14..18]),
6048 (path!("dir/two.ts").to_string(), vec![14..18]),
6049 ]),
6050 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6051 );
6052}
6053
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies inclusion globs across multiple worktrees. When the 7th
    // `SearchQuery::text` argument is `true`, globs are matched against
    // paths prefixed with the worktree name (e.g. "worktree-a/*.rs");
    // when `false`, they are matched worktree-relative ("*.ts") and apply
    // to every worktree. (Presumed meaning of that flag, inferred from the
    // assertions below — confirm against `SearchQuery::text`.)
    init_test(cx);

    // Two worktrees with identically-named files, each containing "NEEDLE".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified glob restricts results to that single worktree.
    // The range 3..9 is the byte offset of "NEEDLE" in "// NEEDLE".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unqualified glob with the flag off matches in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6152
6153#[gpui::test]
6154async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6155 init_test(cx);
6156
6157 let fs = FakeFs::new(cx.background_executor.clone());
6158 fs.insert_tree(
6159 path!("/dir"),
6160 json!({
6161 ".git": {},
6162 ".gitignore": "**/target\n/node_modules\n",
6163 "target": {
6164 "index.txt": "index_key:index_value"
6165 },
6166 "node_modules": {
6167 "eslint": {
6168 "index.ts": "const eslint_key = 'eslint value'",
6169 "package.json": r#"{ "some_key": "some value" }"#,
6170 },
6171 "prettier": {
6172 "index.ts": "const prettier_key = 'prettier value'",
6173 "package.json": r#"{ "other_key": "other value" }"#,
6174 },
6175 },
6176 "package.json": r#"{ "main_key": "main value" }"#,
6177 }),
6178 )
6179 .await;
6180 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6181
6182 let query = "key";
6183 assert_eq!(
6184 search(
6185 &project,
6186 SearchQuery::text(
6187 query,
6188 false,
6189 false,
6190 false,
6191 Default::default(),
6192 Default::default(),
6193 false,
6194 None,
6195 )
6196 .unwrap(),
6197 cx
6198 )
6199 .await
6200 .unwrap(),
6201 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6202 "Only one non-ignored file should have the query"
6203 );
6204
6205 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6206 let path_style = PathStyle::local();
6207 assert_eq!(
6208 search(
6209 &project,
6210 SearchQuery::text(
6211 query,
6212 false,
6213 false,
6214 true,
6215 Default::default(),
6216 Default::default(),
6217 false,
6218 None,
6219 )
6220 .unwrap(),
6221 cx
6222 )
6223 .await
6224 .unwrap(),
6225 HashMap::from_iter([
6226 (path!("dir/package.json").to_string(), vec![8..11]),
6227 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6228 (
6229 path!("dir/node_modules/prettier/package.json").to_string(),
6230 vec![9..12]
6231 ),
6232 (
6233 path!("dir/node_modules/prettier/index.ts").to_string(),
6234 vec![15..18]
6235 ),
6236 (
6237 path!("dir/node_modules/eslint/index.ts").to_string(),
6238 vec![13..16]
6239 ),
6240 (
6241 path!("dir/node_modules/eslint/package.json").to_string(),
6242 vec![8..11]
6243 ),
6244 ]),
6245 "Unrestricted search with ignored directories should find every file with the query"
6246 );
6247
6248 let files_to_include =
6249 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6250 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6251 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6252 assert_eq!(
6253 search(
6254 &project,
6255 SearchQuery::text(
6256 query,
6257 false,
6258 false,
6259 true,
6260 files_to_include,
6261 files_to_exclude,
6262 false,
6263 None,
6264 )
6265 .unwrap(),
6266 cx
6267 )
6268 .await
6269 .unwrap(),
6270 HashMap::from_iter([(
6271 path!("dir/node_modules/prettier/package.json").to_string(),
6272 vec![9..12]
6273 )]),
6274 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6275 );
6276}
6277
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies text search over multi-byte (Cyrillic) content. All match
    // ranges below are byte offsets: each Cyrillic letter is 2 bytes in
    // UTF-8, so "привет" spans 12 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive: stays a plain `Text` query (see assert_matches below)
    // and matches only the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // one.rs: "// " (3) + "ПРИВЕТ" (12) + "? " (2) = offset 17.
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: the query is compiled down to a `Regex` query (see
    // assert_matches below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A query containing a regex metacharacter ('.') still matches it
    // literally — only "ПРИВЕТ." in two.rs qualifies (range includes the
    // trailing dot byte).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6360
6361#[gpui::test]
6362async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6363 init_test(cx);
6364
6365 let fs = FakeFs::new(cx.executor());
6366 fs.insert_tree(
6367 "/one/two",
6368 json!({
6369 "three": {
6370 "a.txt": "",
6371 "four": {}
6372 },
6373 "c.rs": ""
6374 }),
6375 )
6376 .await;
6377
6378 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6379 project
6380 .update(cx, |project, cx| {
6381 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6382 project.create_entry((id, rel_path("b..")), true, cx)
6383 })
6384 .await
6385 .unwrap()
6386 .into_included()
6387 .unwrap();
6388
6389 assert_eq!(
6390 fs.paths(true),
6391 vec![
6392 PathBuf::from(path!("/")),
6393 PathBuf::from(path!("/one")),
6394 PathBuf::from(path!("/one/two")),
6395 PathBuf::from(path!("/one/two/c.rs")),
6396 PathBuf::from(path!("/one/two/three")),
6397 PathBuf::from(path!("/one/two/three/a.txt")),
6398 PathBuf::from(path!("/one/two/three/b..")),
6399 PathBuf::from(path!("/one/two/three/four")),
6400 ]
6401 );
6402}
6403
6404#[gpui::test]
6405async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6406 init_test(cx);
6407
6408 let fs = FakeFs::new(cx.executor());
6409 fs.insert_tree(
6410 path!("/dir"),
6411 json!({
6412 "a.tsx": "a",
6413 }),
6414 )
6415 .await;
6416
6417 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6418
6419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6420 language_registry.add(tsx_lang());
6421 let language_server_names = [
6422 "TypeScriptServer",
6423 "TailwindServer",
6424 "ESLintServer",
6425 "NoHoverCapabilitiesServer",
6426 ];
6427 let mut language_servers = [
6428 language_registry.register_fake_lsp(
6429 "tsx",
6430 FakeLspAdapter {
6431 name: language_server_names[0],
6432 capabilities: lsp::ServerCapabilities {
6433 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6434 ..lsp::ServerCapabilities::default()
6435 },
6436 ..FakeLspAdapter::default()
6437 },
6438 ),
6439 language_registry.register_fake_lsp(
6440 "tsx",
6441 FakeLspAdapter {
6442 name: language_server_names[1],
6443 capabilities: lsp::ServerCapabilities {
6444 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6445 ..lsp::ServerCapabilities::default()
6446 },
6447 ..FakeLspAdapter::default()
6448 },
6449 ),
6450 language_registry.register_fake_lsp(
6451 "tsx",
6452 FakeLspAdapter {
6453 name: language_server_names[2],
6454 capabilities: lsp::ServerCapabilities {
6455 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6456 ..lsp::ServerCapabilities::default()
6457 },
6458 ..FakeLspAdapter::default()
6459 },
6460 ),
6461 language_registry.register_fake_lsp(
6462 "tsx",
6463 FakeLspAdapter {
6464 name: language_server_names[3],
6465 capabilities: lsp::ServerCapabilities {
6466 hover_provider: None,
6467 ..lsp::ServerCapabilities::default()
6468 },
6469 ..FakeLspAdapter::default()
6470 },
6471 ),
6472 ];
6473
6474 let (buffer, _handle) = project
6475 .update(cx, |p, cx| {
6476 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6477 })
6478 .await
6479 .unwrap();
6480 cx.executor().run_until_parked();
6481
6482 let mut servers_with_hover_requests = HashMap::default();
6483 for i in 0..language_server_names.len() {
6484 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6485 panic!(
6486 "Failed to get language server #{i} with name {}",
6487 &language_server_names[i]
6488 )
6489 });
6490 let new_server_name = new_server.server.name();
6491 assert!(
6492 !servers_with_hover_requests.contains_key(&new_server_name),
6493 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6494 );
6495 match new_server_name.as_ref() {
6496 "TailwindServer" | "TypeScriptServer" => {
6497 servers_with_hover_requests.insert(
6498 new_server_name.clone(),
6499 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6500 move |_, _| {
6501 let name = new_server_name.clone();
6502 async move {
6503 Ok(Some(lsp::Hover {
6504 contents: lsp::HoverContents::Scalar(
6505 lsp::MarkedString::String(format!("{name} hover")),
6506 ),
6507 range: None,
6508 }))
6509 }
6510 },
6511 ),
6512 );
6513 }
6514 "ESLintServer" => {
6515 servers_with_hover_requests.insert(
6516 new_server_name,
6517 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6518 |_, _| async move { Ok(None) },
6519 ),
6520 );
6521 }
6522 "NoHoverCapabilitiesServer" => {
6523 let _never_handled = new_server
6524 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6525 panic!(
6526 "Should not call for hovers server with no corresponding capabilities"
6527 )
6528 });
6529 }
6530 unexpected => panic!("Unexpected server name: {unexpected}"),
6531 }
6532 }
6533
6534 let hover_task = project.update(cx, |project, cx| {
6535 project.hover(&buffer, Point::new(0, 0), cx)
6536 });
6537 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6538 |mut hover_request| async move {
6539 hover_request
6540 .next()
6541 .await
6542 .expect("All hover requests should have been triggered")
6543 },
6544 ))
6545 .await;
6546 assert_eq!(
6547 vec!["TailwindServer hover", "TypeScriptServer hover"],
6548 hover_task
6549 .await
6550 .into_iter()
6551 .flatten()
6552 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6553 .sorted()
6554 .collect::<Vec<_>>(),
6555 "Should receive hover responses from all related servers with hover capabilities"
6556 );
6557}
6558
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover responses consisting entirely of empty or
    // whitespace-only strings are discarded, producing no hover blocks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Single fake TypeScript server that advertises hover capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server replies with three hover parts that are all effectively
    // empty: "", spaces only, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Kick off the hover, drive the handler, then check the result is empty.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6632
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::code_actions` filters the server's response by
    // the requested action kinds: the server offers two actions, but only
    // the requested `source.organizeImports` kind is returned.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Single fake TypeScript server advertising code-action capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions with different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only the organize-imports kind over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the matching action survives the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6711
6712#[gpui::test]
6713async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6714 init_test(cx);
6715
6716 let fs = FakeFs::new(cx.executor());
6717 fs.insert_tree(
6718 path!("/dir"),
6719 json!({
6720 "a.tsx": "a",
6721 }),
6722 )
6723 .await;
6724
6725 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6726
6727 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6728 language_registry.add(tsx_lang());
6729 let language_server_names = [
6730 "TypeScriptServer",
6731 "TailwindServer",
6732 "ESLintServer",
6733 "NoActionsCapabilitiesServer",
6734 ];
6735
6736 let mut language_server_rxs = [
6737 language_registry.register_fake_lsp(
6738 "tsx",
6739 FakeLspAdapter {
6740 name: language_server_names[0],
6741 capabilities: lsp::ServerCapabilities {
6742 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6743 ..lsp::ServerCapabilities::default()
6744 },
6745 ..FakeLspAdapter::default()
6746 },
6747 ),
6748 language_registry.register_fake_lsp(
6749 "tsx",
6750 FakeLspAdapter {
6751 name: language_server_names[1],
6752 capabilities: lsp::ServerCapabilities {
6753 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6754 ..lsp::ServerCapabilities::default()
6755 },
6756 ..FakeLspAdapter::default()
6757 },
6758 ),
6759 language_registry.register_fake_lsp(
6760 "tsx",
6761 FakeLspAdapter {
6762 name: language_server_names[2],
6763 capabilities: lsp::ServerCapabilities {
6764 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6765 ..lsp::ServerCapabilities::default()
6766 },
6767 ..FakeLspAdapter::default()
6768 },
6769 ),
6770 language_registry.register_fake_lsp(
6771 "tsx",
6772 FakeLspAdapter {
6773 name: language_server_names[3],
6774 capabilities: lsp::ServerCapabilities {
6775 code_action_provider: None,
6776 ..lsp::ServerCapabilities::default()
6777 },
6778 ..FakeLspAdapter::default()
6779 },
6780 ),
6781 ];
6782
6783 let (buffer, _handle) = project
6784 .update(cx, |p, cx| {
6785 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6786 })
6787 .await
6788 .unwrap();
6789 cx.executor().run_until_parked();
6790
6791 let mut servers_with_actions_requests = HashMap::default();
6792 for i in 0..language_server_names.len() {
6793 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6794 panic!(
6795 "Failed to get language server #{i} with name {}",
6796 &language_server_names[i]
6797 )
6798 });
6799 let new_server_name = new_server.server.name();
6800
6801 assert!(
6802 !servers_with_actions_requests.contains_key(&new_server_name),
6803 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6804 );
6805 match new_server_name.0.as_ref() {
6806 "TailwindServer" | "TypeScriptServer" => {
6807 servers_with_actions_requests.insert(
6808 new_server_name.clone(),
6809 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6810 move |_, _| {
6811 let name = new_server_name.clone();
6812 async move {
6813 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6814 lsp::CodeAction {
6815 title: format!("{name} code action"),
6816 ..lsp::CodeAction::default()
6817 },
6818 )]))
6819 }
6820 },
6821 ),
6822 );
6823 }
6824 "ESLintServer" => {
6825 servers_with_actions_requests.insert(
6826 new_server_name,
6827 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6828 |_, _| async move { Ok(None) },
6829 ),
6830 );
6831 }
6832 "NoActionsCapabilitiesServer" => {
6833 let _never_handled = new_server
6834 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6835 panic!(
6836 "Should not call for code actions server with no corresponding capabilities"
6837 )
6838 });
6839 }
6840 unexpected => panic!("Unexpected server name: {unexpected}"),
6841 }
6842 }
6843
6844 let code_actions_task = project.update(cx, |project, cx| {
6845 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6846 });
6847
6848 // cx.run_until_parked();
6849 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6850 |mut code_actions_request| async move {
6851 code_actions_request
6852 .next()
6853 .await
6854 .expect("All code actions requests should have been triggered")
6855 },
6856 ))
6857 .await;
6858 assert_eq!(
6859 vec!["TailwindServer code action", "TypeScriptServer code action"],
6860 code_actions_task
6861 .await
6862 .unwrap()
6863 .unwrap()
6864 .into_iter()
6865 .map(|code_action| code_action.lsp_action.title().to_owned())
6866 .sorted()
6867 .collect::<Vec<_>>(),
6868 "Should receive code actions responses from all related servers with hover capabilities"
6869 );
6870}
6871
6872#[gpui::test]
6873async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6874 init_test(cx);
6875
6876 let fs = FakeFs::new(cx.executor());
6877 fs.insert_tree(
6878 "/dir",
6879 json!({
6880 "a.rs": "let a = 1;",
6881 "b.rs": "let b = 2;",
6882 "c.rs": "let c = 2;",
6883 }),
6884 )
6885 .await;
6886
6887 let project = Project::test(
6888 fs,
6889 [
6890 "/dir/a.rs".as_ref(),
6891 "/dir/b.rs".as_ref(),
6892 "/dir/c.rs".as_ref(),
6893 ],
6894 cx,
6895 )
6896 .await;
6897
6898 // check the initial state and get the worktrees
6899 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6900 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6901 assert_eq!(worktrees.len(), 3);
6902
6903 let worktree_a = worktrees[0].read(cx);
6904 let worktree_b = worktrees[1].read(cx);
6905 let worktree_c = worktrees[2].read(cx);
6906
6907 // check they start in the right order
6908 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6909 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6910 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6911
6912 (
6913 worktrees[0].clone(),
6914 worktrees[1].clone(),
6915 worktrees[2].clone(),
6916 )
6917 });
6918
6919 // move first worktree to after the second
6920 // [a, b, c] -> [b, a, c]
6921 project
6922 .update(cx, |project, cx| {
6923 let first = worktree_a.read(cx);
6924 let second = worktree_b.read(cx);
6925 project.move_worktree(first.id(), second.id(), cx)
6926 })
6927 .expect("moving first after second");
6928
6929 // check the state after moving
6930 project.update(cx, |project, cx| {
6931 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6932 assert_eq!(worktrees.len(), 3);
6933
6934 let first = worktrees[0].read(cx);
6935 let second = worktrees[1].read(cx);
6936 let third = worktrees[2].read(cx);
6937
6938 // check they are now in the right order
6939 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6940 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6941 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6942 });
6943
6944 // move the second worktree to before the first
6945 // [b, a, c] -> [a, b, c]
6946 project
6947 .update(cx, |project, cx| {
6948 let second = worktree_a.read(cx);
6949 let first = worktree_b.read(cx);
6950 project.move_worktree(first.id(), second.id(), cx)
6951 })
6952 .expect("moving second before first");
6953
6954 // check the state after moving
6955 project.update(cx, |project, cx| {
6956 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6957 assert_eq!(worktrees.len(), 3);
6958
6959 let first = worktrees[0].read(cx);
6960 let second = worktrees[1].read(cx);
6961 let third = worktrees[2].read(cx);
6962
6963 // check they are now in the right order
6964 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6965 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6966 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6967 });
6968
6969 // move the second worktree to after the third
6970 // [a, b, c] -> [a, c, b]
6971 project
6972 .update(cx, |project, cx| {
6973 let second = worktree_b.read(cx);
6974 let third = worktree_c.read(cx);
6975 project.move_worktree(second.id(), third.id(), cx)
6976 })
6977 .expect("moving second after third");
6978
6979 // check the state after moving
6980 project.update(cx, |project, cx| {
6981 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6982 assert_eq!(worktrees.len(), 3);
6983
6984 let first = worktrees[0].read(cx);
6985 let second = worktrees[1].read(cx);
6986 let third = worktrees[2].read(cx);
6987
6988 // check they are now in the right order
6989 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6990 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6991 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6992 });
6993
6994 // move the third worktree to before the second
6995 // [a, c, b] -> [a, b, c]
6996 project
6997 .update(cx, |project, cx| {
6998 let third = worktree_c.read(cx);
6999 let second = worktree_b.read(cx);
7000 project.move_worktree(third.id(), second.id(), cx)
7001 })
7002 .expect("moving third before second");
7003
7004 // check the state after moving
7005 project.update(cx, |project, cx| {
7006 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7007 assert_eq!(worktrees.len(), 3);
7008
7009 let first = worktrees[0].read(cx);
7010 let second = worktrees[1].read(cx);
7011 let third = worktrees[2].read(cx);
7012
7013 // check they are now in the right order
7014 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7015 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7016 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7017 });
7018
7019 // move the first worktree to after the third
7020 // [a, b, c] -> [b, c, a]
7021 project
7022 .update(cx, |project, cx| {
7023 let first = worktree_a.read(cx);
7024 let third = worktree_c.read(cx);
7025 project.move_worktree(first.id(), third.id(), cx)
7026 })
7027 .expect("moving first after third");
7028
7029 // check the state after moving
7030 project.update(cx, |project, cx| {
7031 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7032 assert_eq!(worktrees.len(), 3);
7033
7034 let first = worktrees[0].read(cx);
7035 let second = worktrees[1].read(cx);
7036 let third = worktrees[2].read(cx);
7037
7038 // check they are now in the right order
7039 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7040 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7041 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7042 });
7043
7044 // move the third worktree to before the first
7045 // [b, c, a] -> [a, b, c]
7046 project
7047 .update(cx, |project, cx| {
7048 let third = worktree_a.read(cx);
7049 let first = worktree_b.read(cx);
7050 project.move_worktree(third.id(), first.id(), cx)
7051 })
7052 .expect("moving third before first");
7053
7054 // check the state after moving
7055 project.update(cx, |project, cx| {
7056 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7057 assert_eq!(worktrees.len(), 3);
7058
7059 let first = worktrees[0].read(cx);
7060 let second = worktrees[1].read(cx);
7061 let third = worktrees[2].read(cx);
7062
7063 // check they are now in the right order
7064 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7065 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7066 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7067 });
7068}
7069
7070#[gpui::test]
7071async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7072 init_test(cx);
7073
7074 let staged_contents = r#"
7075 fn main() {
7076 println!("hello world");
7077 }
7078 "#
7079 .unindent();
7080 let file_contents = r#"
7081 // print goodbye
7082 fn main() {
7083 println!("goodbye world");
7084 }
7085 "#
7086 .unindent();
7087
7088 let fs = FakeFs::new(cx.background_executor.clone());
7089 fs.insert_tree(
7090 "/dir",
7091 json!({
7092 ".git": {},
7093 "src": {
7094 "main.rs": file_contents,
7095 }
7096 }),
7097 )
7098 .await;
7099
7100 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7101
7102 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7103
7104 let buffer = project
7105 .update(cx, |project, cx| {
7106 project.open_local_buffer("/dir/src/main.rs", cx)
7107 })
7108 .await
7109 .unwrap();
7110 let unstaged_diff = project
7111 .update(cx, |project, cx| {
7112 project.open_unstaged_diff(buffer.clone(), cx)
7113 })
7114 .await
7115 .unwrap();
7116
7117 cx.run_until_parked();
7118 unstaged_diff.update(cx, |unstaged_diff, cx| {
7119 let snapshot = buffer.read(cx).snapshot();
7120 assert_hunks(
7121 unstaged_diff.hunks(&snapshot, cx),
7122 &snapshot,
7123 &unstaged_diff.base_text_string().unwrap(),
7124 &[
7125 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7126 (
7127 2..3,
7128 " println!(\"hello world\");\n",
7129 " println!(\"goodbye world\");\n",
7130 DiffHunkStatus::modified_none(),
7131 ),
7132 ],
7133 );
7134 });
7135
7136 let staged_contents = r#"
7137 // print goodbye
7138 fn main() {
7139 }
7140 "#
7141 .unindent();
7142
7143 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7144
7145 cx.run_until_parked();
7146 unstaged_diff.update(cx, |unstaged_diff, cx| {
7147 let snapshot = buffer.read(cx).snapshot();
7148 assert_hunks(
7149 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7150 &snapshot,
7151 &unstaged_diff.base_text().text(),
7152 &[(
7153 2..3,
7154 "",
7155 " println!(\"goodbye world\");\n",
7156 DiffHunkStatus::added_none(),
7157 )],
7158 );
7159 });
7160}
7161
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Exercises the uncommitted diff (working copy vs. HEAD): hunk secondary
    // statuses reflecting the index state, reaction to HEAD changing, and
    // diffs for files that are deleted in the working copy.
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both contain `deletion.rs`, which is absent from
    // the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should have been assigned the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The added comment is not in the index, so it still has a
                // secondary (unstaged) hunk.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is already staged: no secondary hunk.
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows up as one deletion hunk, still present in the
    // index (hence the secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7341
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Covers the lifecycle of staging hunks through the uncommitted diff:
    // optimistic (pending) secondary statuses, the events emitted, rollback
    // when the index write fails, and multiple staging operations in flight.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Capture the diff's event stream so we can assert on what is emitted.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                // Pending until the index write completes.
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                // Optimistically pending even though the write will fail.
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7681
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Reproduces races between index writes and delayed file-system events:
    // hunks staged while earlier FS events are still buffered must all end
    // up staged once the buffered events are flushed.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7875
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomly stages and unstages hunks, tracking the expected secondary
    // status locally, then verifies the diff converges to that expected
    // state once all index writes settle.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Flip random hunks between staged and unstaged, recording the pending
    // status we expect each one to hold.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending statuses resolve to their final form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7998
7999#[gpui::test]
8000async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8001 init_test(cx);
8002
8003 let committed_contents = r#"
8004 fn main() {
8005 println!("hello from HEAD");
8006 }
8007 "#
8008 .unindent();
8009 let file_contents = r#"
8010 fn main() {
8011 println!("hello from the working copy");
8012 }
8013 "#
8014 .unindent();
8015
8016 let fs = FakeFs::new(cx.background_executor.clone());
8017 fs.insert_tree(
8018 "/dir",
8019 json!({
8020 ".git": {},
8021 "src": {
8022 "main.rs": file_contents,
8023 }
8024 }),
8025 )
8026 .await;
8027
8028 fs.set_head_for_repo(
8029 Path::new("/dir/.git"),
8030 &[("src/main.rs", committed_contents.clone())],
8031 "deadbeef",
8032 );
8033 fs.set_index_for_repo(
8034 Path::new("/dir/.git"),
8035 &[("src/main.rs", committed_contents.clone())],
8036 );
8037
8038 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8039
8040 let buffer = project
8041 .update(cx, |project, cx| {
8042 project.open_local_buffer("/dir/src/main.rs", cx)
8043 })
8044 .await
8045 .unwrap();
8046 let uncommitted_diff = project
8047 .update(cx, |project, cx| {
8048 project.open_uncommitted_diff(buffer.clone(), cx)
8049 })
8050 .await
8051 .unwrap();
8052
8053 cx.run_until_parked();
8054 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8055 let snapshot = buffer.read(cx).snapshot();
8056 assert_hunks(
8057 uncommitted_diff.hunks(&snapshot, cx),
8058 &snapshot,
8059 &uncommitted_diff.base_text_string().unwrap(),
8060 &[(
8061 1..2,
8062 " println!(\"hello from HEAD\");\n",
8063 " println!(\"hello from the working copy\");\n",
8064 DiffHunkStatus {
8065 kind: DiffHunkStatusKind::Modified,
8066 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8067 },
8068 )],
8069 );
8070 });
8071}
8072
8073#[gpui::test]
8074async fn test_repository_and_path_for_project_path(
8075 background_executor: BackgroundExecutor,
8076 cx: &mut gpui::TestAppContext,
8077) {
8078 init_test(cx);
8079 let fs = FakeFs::new(background_executor);
8080 fs.insert_tree(
8081 path!("/root"),
8082 json!({
8083 "c.txt": "",
8084 "dir1": {
8085 ".git": {},
8086 "deps": {
8087 "dep1": {
8088 ".git": {},
8089 "src": {
8090 "a.txt": ""
8091 }
8092 }
8093 },
8094 "src": {
8095 "b.txt": ""
8096 }
8097 },
8098 }),
8099 )
8100 .await;
8101
8102 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8103 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8104 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8105 project
8106 .update(cx, |project, cx| project.git_scans_complete(cx))
8107 .await;
8108 cx.run_until_parked();
8109
8110 project.read_with(cx, |project, cx| {
8111 let git_store = project.git_store().read(cx);
8112 let pairs = [
8113 ("c.txt", None),
8114 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8115 (
8116 "dir1/deps/dep1/src/a.txt",
8117 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8118 ),
8119 ];
8120 let expected = pairs
8121 .iter()
8122 .map(|(path, result)| {
8123 (
8124 path,
8125 result.map(|(repo, repo_path)| {
8126 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8127 }),
8128 )
8129 })
8130 .collect::<Vec<_>>();
8131 let actual = pairs
8132 .iter()
8133 .map(|(path, _)| {
8134 let project_path = (tree_id, rel_path(path)).into();
8135 let result = maybe!({
8136 let (repo, repo_path) =
8137 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8138 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8139 });
8140 (path, result)
8141 })
8142 .collect::<Vec<_>>();
8143 pretty_assertions::assert_eq!(expected, actual);
8144 });
8145
8146 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8147 .await
8148 .unwrap();
8149 cx.run_until_parked();
8150
8151 project.read_with(cx, |project, cx| {
8152 let git_store = project.git_store().read(cx);
8153 assert_eq!(
8154 git_store.repository_and_path_for_project_path(
8155 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8156 cx
8157 ),
8158 None
8159 );
8160 });
8161}
8162
8163#[gpui::test]
8164async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8165 init_test(cx);
8166 let fs = FakeFs::new(cx.background_executor.clone());
8167 let home = paths::home_dir();
8168 fs.insert_tree(
8169 home,
8170 json!({
8171 ".git": {},
8172 "project": {
8173 "a.txt": "A"
8174 },
8175 }),
8176 )
8177 .await;
8178
8179 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8180 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8181 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8182
8183 project
8184 .update(cx, |project, cx| project.git_scans_complete(cx))
8185 .await;
8186 tree.flush_fs_events(cx).await;
8187
8188 project.read_with(cx, |project, cx| {
8189 let containing = project
8190 .git_store()
8191 .read(cx)
8192 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8193 assert!(containing.is_none());
8194 });
8195
8196 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8197 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8198 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8199 project
8200 .update(cx, |project, cx| project.git_scans_complete(cx))
8201 .await;
8202 tree.flush_fs_events(cx).await;
8203
8204 project.read_with(cx, |project, cx| {
8205 let containing = project
8206 .git_store()
8207 .read(cx)
8208 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8209 assert_eq!(
8210 containing
8211 .unwrap()
8212 .0
8213 .read(cx)
8214 .work_directory_abs_path
8215 .as_ref(),
8216 home,
8217 );
8218 });
8219}
8220
// End-to-end check of `Repository::cached_status` against a real git
// repository on disk: the statuses observed at startup are correct, and they
// are kept up to date as files are modified, committed, and deleted.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    // Wait for the initial scan (including git state) to settle before
    // inspecting statuses.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage everything and commit, making the working copy clean again.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8350
8351#[gpui::test]
8352async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8353 init_test(cx);
8354 cx.executor().allow_parking();
8355
8356 let root = TempTree::new(json!({
8357 "project": {
8358 "sub": {},
8359 "a.txt": "",
8360 },
8361 }));
8362
8363 let work_dir = root.path().join("project");
8364 let repo = git_init(work_dir.as_path());
8365 // a.txt exists in HEAD and the working copy but is deleted in the index.
8366 git_add("a.txt", &repo);
8367 git_commit("Initial commit", &repo);
8368 git_remove_index("a.txt".as_ref(), &repo);
8369 // `sub` is a nested git repository.
8370 let _sub = git_init(&work_dir.join("sub"));
8371
8372 let project = Project::test(
8373 Arc::new(RealFs::new(None, cx.executor())),
8374 [root.path()],
8375 cx,
8376 )
8377 .await;
8378
8379 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8380 tree.flush_fs_events(cx).await;
8381 project
8382 .update(cx, |project, cx| project.git_scans_complete(cx))
8383 .await;
8384 cx.executor().run_until_parked();
8385
8386 let repository = project.read_with(cx, |project, cx| {
8387 project
8388 .repositories(cx)
8389 .values()
8390 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8391 .unwrap()
8392 .clone()
8393 });
8394
8395 repository.read_with(cx, |repository, _cx| {
8396 let entries = repository.cached_status().collect::<Vec<_>>();
8397
8398 // `sub` doesn't appear in our computed statuses.
8399 // a.txt appears with a combined `DA` status.
8400 assert_eq!(
8401 entries,
8402 [StatusEntry {
8403 repo_path: repo_path("a.txt"),
8404 status: TrackedStatus {
8405 index_status: StatusCode::Deleted,
8406 worktree_status: StatusCode::Added
8407 }
8408 .into(),
8409 }]
8410 )
8411 });
8412}
8413
// A worktree rooted deep inside a repository should still discover that
// repository at an ancestor directory and report statuses for paths under
// the worktree root.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Mark e.txt as untracked in the fake repository's status.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open a project whose only worktree is two levels below the repository
    // root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the fake repository's statuses; the cached statuses should
    // follow suit once the rescan completes.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8493
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): `#[cfg(any())]` below compiles this test out entirely until
// the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting change to a.txt on a second branch, then
    // cherry-pick it back onto main to put the repo in a conflicted
    // cherry-pick state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should be surfaced via `merge_conflicts`.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, the conflicts should clear.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8576
// Rewriting `.gitignore` should re-evaluate which entries are ignored, and
// staging a newly non-ignored file should be reflected in its index status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, and b.txt is reported as staged (Added).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8644
8645// NOTE:
8646// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8647// a directory which some program has already open.
// This is a limitation of Windows.
8649// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8650// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Renaming a repository's work directory on disk should update the
// repository's recorded absolute path while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed and then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory; the repository should pick up the new
    // location and report the same statuses as before.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8726
8727// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8728// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
8730// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8731// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Exercises a broad set of status transitions against a real on-disk git
// repository: untracked files, modifications, commits, resets, stashes,
// gitignore edits, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        // a.txt and b.txt were just committed, so they no longer carry a
        // status; f.txt was never added and stays untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the gitignore to cover f.txt as well.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create a file deep inside a freshly created directory; it should show
    // up as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the ancestor directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8955
8956#[gpui::test]
8957#[ignore]
8958async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8959 init_test(cx);
8960 cx.executor().allow_parking();
8961
8962 const IGNORE_RULE: &str = "**/target";
8963
8964 let root = TempTree::new(json!({
8965 "project": {
8966 "src": {
8967 "main.rs": "fn main() {}"
8968 },
8969 "target": {
8970 "debug": {
8971 "important_text.txt": "important text",
8972 },
8973 },
8974 ".gitignore": IGNORE_RULE
8975 },
8976
8977 }));
8978 let root_path = root.path();
8979
8980 // Set up git repository before creating the worktree.
8981 let work_dir = root.path().join("project");
8982 let repo = git_init(work_dir.as_path());
8983 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8984 git_add("src/main.rs", &repo);
8985 git_add(".gitignore", &repo);
8986 git_commit("Initial commit", &repo);
8987
8988 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8989 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8990 let project_events = Arc::new(Mutex::new(Vec::new()));
8991 project.update(cx, |project, cx| {
8992 let repo_events = repository_updates.clone();
8993 cx.subscribe(project.git_store(), move |_, _, e, _| {
8994 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8995 repo_events.lock().push(e.clone());
8996 }
8997 })
8998 .detach();
8999 let project_events = project_events.clone();
9000 cx.subscribe_self(move |_, e, _| {
9001 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9002 project_events.lock().extend(
9003 updates
9004 .iter()
9005 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9006 .filter(|(path, _)| path != "fs-event-sentinel"),
9007 );
9008 }
9009 })
9010 .detach();
9011 });
9012
9013 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9014 tree.flush_fs_events(cx).await;
9015 tree.update(cx, |tree, cx| {
9016 tree.load_file(
9017 rel_path("project/target/debug/important_text.txt"),
9018 &Default::default(),
9019 None,
9020 cx,
9021 )
9022 })
9023 .await
9024 .unwrap();
9025 tree.update(cx, |tree, _| {
9026 assert_eq!(
9027 tree.entries(true, 0)
9028 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9029 .collect::<Vec<_>>(),
9030 vec![
9031 (rel_path(""), false),
9032 (rel_path("project/"), false),
9033 (rel_path("project/.gitignore"), false),
9034 (rel_path("project/src"), false),
9035 (rel_path("project/src/main.rs"), false),
9036 (rel_path("project/target"), true),
9037 (rel_path("project/target/debug"), true),
9038 (rel_path("project/target/debug/important_text.txt"), true),
9039 ]
9040 );
9041 });
9042
9043 assert_eq!(
9044 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9045 vec![
9046 RepositoryEvent::StatusesChanged { full_scan: true },
9047 RepositoryEvent::MergeHeadsChanged,
9048 ],
9049 "Initial worktree scan should produce a repo update event"
9050 );
9051 assert_eq!(
9052 project_events.lock().drain(..).collect::<Vec<_>>(),
9053 vec![
9054 ("project/target".to_string(), PathChange::Loaded),
9055 ("project/target/debug".to_string(), PathChange::Loaded),
9056 (
9057 "project/target/debug/important_text.txt".to_string(),
9058 PathChange::Loaded
9059 ),
9060 ],
9061 "Initial project changes should show that all not-ignored and all opened files are loaded"
9062 );
9063
9064 let deps_dir = work_dir.join("target").join("debug").join("deps");
9065 std::fs::create_dir_all(&deps_dir).unwrap();
9066 tree.flush_fs_events(cx).await;
9067 project
9068 .update(cx, |project, cx| project.git_scans_complete(cx))
9069 .await;
9070 cx.executor().run_until_parked();
9071 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9072 tree.flush_fs_events(cx).await;
9073 project
9074 .update(cx, |project, cx| project.git_scans_complete(cx))
9075 .await;
9076 cx.executor().run_until_parked();
9077 std::fs::remove_dir_all(&deps_dir).unwrap();
9078 tree.flush_fs_events(cx).await;
9079 project
9080 .update(cx, |project, cx| project.git_scans_complete(cx))
9081 .await;
9082 cx.executor().run_until_parked();
9083
9084 tree.update(cx, |tree, _| {
9085 assert_eq!(
9086 tree.entries(true, 0)
9087 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9088 .collect::<Vec<_>>(),
9089 vec![
9090 (rel_path(""), false),
9091 (rel_path("project/"), false),
9092 (rel_path("project/.gitignore"), false),
9093 (rel_path("project/src"), false),
9094 (rel_path("project/src/main.rs"), false),
9095 (rel_path("project/target"), true),
9096 (rel_path("project/target/debug"), true),
9097 (rel_path("project/target/debug/important_text.txt"), true),
9098 ],
9099 "No stray temp files should be left after the flycheck changes"
9100 );
9101 });
9102
9103 assert_eq!(
9104 repository_updates
9105 .lock()
9106 .iter()
9107 .cloned()
9108 .collect::<Vec<_>>(),
9109 Vec::new(),
9110 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9111 );
9112 assert_eq!(
9113 project_events.lock().as_slice(),
9114 vec![
9115 ("project/target/debug/deps".to_string(), PathChange::Added),
9116 ("project/target/debug/deps".to_string(), PathChange::Removed),
9117 ],
9118 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9119 No updates for more nested directories should happen as those are ignored",
9120 );
9121}
9122
9123#[gpui::test]
9124async fn test_odd_events_for_ignored_dirs(
9125 executor: BackgroundExecutor,
9126 cx: &mut gpui::TestAppContext,
9127) {
9128 init_test(cx);
9129 let fs = FakeFs::new(executor);
9130 fs.insert_tree(
9131 path!("/root"),
9132 json!({
9133 ".git": {},
9134 ".gitignore": "**/target/",
9135 "src": {
9136 "main.rs": "fn main() {}",
9137 },
9138 "target": {
9139 "debug": {
9140 "foo.txt": "foo",
9141 "deps": {}
9142 }
9143 }
9144 }),
9145 )
9146 .await;
9147 fs.set_head_and_index_for_repo(
9148 path!("/root/.git").as_ref(),
9149 &[
9150 (".gitignore", "**/target/".into()),
9151 ("src/main.rs", "fn main() {}".into()),
9152 ],
9153 );
9154
9155 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9156 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9157 let project_events = Arc::new(Mutex::new(Vec::new()));
9158 project.update(cx, |project, cx| {
9159 let repository_updates = repository_updates.clone();
9160 cx.subscribe(project.git_store(), move |_, _, e, _| {
9161 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9162 repository_updates.lock().push(e.clone());
9163 }
9164 })
9165 .detach();
9166 let project_events = project_events.clone();
9167 cx.subscribe_self(move |_, e, _| {
9168 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9169 project_events.lock().extend(
9170 updates
9171 .iter()
9172 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9173 .filter(|(path, _)| path != "fs-event-sentinel"),
9174 );
9175 }
9176 })
9177 .detach();
9178 });
9179
9180 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9181 tree.update(cx, |tree, cx| {
9182 tree.load_file(
9183 rel_path("target/debug/foo.txt"),
9184 &Default::default(),
9185 None,
9186 cx,
9187 )
9188 })
9189 .await
9190 .unwrap();
9191 tree.flush_fs_events(cx).await;
9192 project
9193 .update(cx, |project, cx| project.git_scans_complete(cx))
9194 .await;
9195 cx.run_until_parked();
9196 tree.update(cx, |tree, _| {
9197 assert_eq!(
9198 tree.entries(true, 0)
9199 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9200 .collect::<Vec<_>>(),
9201 vec![
9202 (rel_path(""), false),
9203 (rel_path(".gitignore"), false),
9204 (rel_path("src"), false),
9205 (rel_path("src/main.rs"), false),
9206 (rel_path("target"), true),
9207 (rel_path("target/debug"), true),
9208 (rel_path("target/debug/deps"), true),
9209 (rel_path("target/debug/foo.txt"), true),
9210 ]
9211 );
9212 });
9213
9214 assert_eq!(
9215 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9216 vec![
9217 RepositoryEvent::MergeHeadsChanged,
9218 RepositoryEvent::BranchChanged,
9219 RepositoryEvent::StatusesChanged { full_scan: false },
9220 RepositoryEvent::StatusesChanged { full_scan: false },
9221 ],
9222 "Initial worktree scan should produce a repo update event"
9223 );
9224 assert_eq!(
9225 project_events.lock().drain(..).collect::<Vec<_>>(),
9226 vec![
9227 ("target".to_string(), PathChange::Loaded),
9228 ("target/debug".to_string(), PathChange::Loaded),
9229 ("target/debug/deps".to_string(), PathChange::Loaded),
9230 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9231 ],
9232 "All non-ignored entries and all opened firs should be getting a project event",
9233 );
9234
9235 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9236 // This may happen multiple times during a single flycheck, but once is enough for testing.
9237 fs.emit_fs_event("/root/target/debug/deps", None);
9238 tree.flush_fs_events(cx).await;
9239 project
9240 .update(cx, |project, cx| project.git_scans_complete(cx))
9241 .await;
9242 cx.executor().run_until_parked();
9243
9244 assert_eq!(
9245 repository_updates
9246 .lock()
9247 .iter()
9248 .cloned()
9249 .collect::<Vec<_>>(),
9250 Vec::new(),
9251 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9252 );
9253 assert_eq!(
9254 project_events.lock().as_slice(),
9255 Vec::new(),
9256 "No further project events should happen, as only ignored dirs received FS events",
9257 );
9258}
9259
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // A nested repository (`dep1`) is opened as the only visible worktree.
    // The outer repository at `dir1` must not be reported, even after a
    // single-file (invisible) worktree pointing into `dir1` is added.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repository rooted at the visible worktree should be known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from the outer repo; `visible: false` creates an
    // invisible worktree for it.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The invisible worktree must not surface the outer `dir1` repository.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9321
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that git status and the `is_ignored` flag stay correct across a
    // rescan, for files excluded by an ancestor `.gitignore` (above the
    // worktree root) as well as by the repository's own `.gitignore`.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Scan everything; default exclusions could hide test entries.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force their entries to load
    // so we can assert on them below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked and unmodified: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored by the ancestor `.gitignore` outside the worktree root.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by the repo's own `.gitignore`.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage tracked-file2, then check
    // the rescan picks up the right state for all of them.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file shows as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9462
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that repositories reached through a linked git worktree
    // (`.git` file with `gitdir: ../.git/worktrees/...`) and through a
    // submodule (`gitdir: ../../.git/modules/...`) are both discovered, and
    // that git events in those repos refresh their statuses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (outer, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD/index contain "b" while the file on disk is "B", so the
            // file should be reported as modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked-worktree repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for the repository's pending jobs before reading statuses.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9618
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees inside the same repository must resolve to a
    // single repository entry, not one per worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9665
9666async fn search(
9667 project: &Entity<Project>,
9668 query: SearchQuery,
9669 cx: &mut gpui::TestAppContext,
9670) -> Result<HashMap<String, Vec<Range<usize>>>> {
9671 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9672 let mut results = HashMap::default();
9673 while let Ok(search_result) = search_rx.recv().await {
9674 match search_result {
9675 SearchResult::Buffer { buffer, ranges } => {
9676 results.entry(buffer).or_insert(ranges);
9677 }
9678 SearchResult::LimitReached => {}
9679 }
9680 }
9681 Ok(results
9682 .into_iter()
9683 .map(|(buffer, ranges)| {
9684 buffer.update(cx, |buffer, cx| {
9685 let path = buffer
9686 .file()
9687 .unwrap()
9688 .full_path(cx)
9689 .to_string_lossy()
9690 .to_string();
9691 let ranges = ranges
9692 .into_iter()
9693 .map(|range| range.to_offset(buffer))
9694 .collect::<Vec<_>>();
9695 (path, ranges)
9696 })
9697 })
9698 .collect())
9699}
9700
/// Shared setup for every test in this module: installs a test
/// `SettingsStore` global, then initializes the release channel, the language
/// registry, and `Project` settings, which `Project::test` relies on.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must exist before the other init calls read settings.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9712
9713fn json_lang() -> Arc<Language> {
9714 Arc::new(Language::new(
9715 LanguageConfig {
9716 name: "JSON".into(),
9717 matcher: LanguageMatcher {
9718 path_suffixes: vec!["json".to_string()],
9719 ..Default::default()
9720 },
9721 ..Default::default()
9722 },
9723 None,
9724 ))
9725}
9726
9727fn js_lang() -> Arc<Language> {
9728 Arc::new(Language::new(
9729 LanguageConfig {
9730 name: "JavaScript".into(),
9731 matcher: LanguageMatcher {
9732 path_suffixes: vec!["js".to_string()],
9733 ..Default::default()
9734 },
9735 ..Default::default()
9736 },
9737 None,
9738 ))
9739}
9740
9741fn rust_lang() -> Arc<Language> {
9742 Arc::new(Language::new(
9743 LanguageConfig {
9744 name: "Rust".into(),
9745 matcher: LanguageMatcher {
9746 path_suffixes: vec!["rs".to_string()],
9747 ..Default::default()
9748 },
9749 ..Default::default()
9750 },
9751 Some(tree_sitter_rust::LANGUAGE.into()),
9752 ))
9753}
9754
/// A Python language for toolchain tests: no grammar, but a manifest name
/// (`pyproject.toml`) and a fake toolchain lister that reports a `.venv`
/// directory for each ancestor of the queried path that contains one on `fs`.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Fake lister backed by the test filesystem; never resolves toolchains.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister returns a toolchain for every `.venv` directory found
            // in the ancestors of the query path (including the path itself).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            // Resolution is intentionally unsupported in this fake.
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9824
9825fn typescript_lang() -> Arc<Language> {
9826 Arc::new(Language::new(
9827 LanguageConfig {
9828 name: "TypeScript".into(),
9829 matcher: LanguageMatcher {
9830 path_suffixes: vec!["ts".to_string()],
9831 ..Default::default()
9832 },
9833 ..Default::default()
9834 },
9835 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9836 ))
9837}
9838
9839fn tsx_lang() -> Arc<Language> {
9840 Arc::new(Language::new(
9841 LanguageConfig {
9842 name: "tsx".into(),
9843 matcher: LanguageMatcher {
9844 path_suffixes: vec!["tsx".to_string()],
9845 ..Default::default()
9846 },
9847 ..Default::default()
9848 },
9849 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9850 ))
9851}
9852
9853fn get_all_tasks(
9854 project: &Entity<Project>,
9855 task_contexts: Arc<TaskContexts>,
9856 cx: &mut App,
9857) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9858 let new_tasks = project.update(cx, |project, cx| {
9859 project.task_store.update(cx, |task_store, cx| {
9860 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9861 this.used_and_current_resolved_tasks(task_contexts, cx)
9862 })
9863 })
9864 });
9865
9866 cx.background_spawn(async move {
9867 let (mut old, new) = new_tasks.await;
9868 old.extend(new);
9869 old
9870 })
9871}
9872
9873#[track_caller]
9874fn assert_entry_git_state(
9875 tree: &Worktree,
9876 repository: &Repository,
9877 path: &str,
9878 index_status: Option<StatusCode>,
9879 is_ignored: bool,
9880) {
9881 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9882 let entry = tree
9883 .entry_for_path(&rel_path(path))
9884 .unwrap_or_else(|| panic!("entry {path} not found"));
9885 let status = repository
9886 .status_for_path(&repo_path(path))
9887 .map(|entry| entry.status);
9888 let expected = index_status.map(|index_status| {
9889 TrackedStatus {
9890 index_status,
9891 worktree_status: StatusCode::Unmodified,
9892 }
9893 .into()
9894 });
9895 assert_eq!(
9896 status, expected,
9897 "expected {path} to have git status: {expected:?}"
9898 );
9899 assert_eq!(
9900 entry.is_ignored, is_ignored,
9901 "expected {path} to have is_ignored: {is_ignored}"
9902 );
9903}
9904
9905#[track_caller]
9906fn git_init(path: &Path) -> git2::Repository {
9907 let mut init_opts = RepositoryInitOptions::new();
9908 init_opts.initial_head("main");
9909 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9910}
9911
9912#[track_caller]
9913fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9914 let path = path.as_ref();
9915 let mut index = repo.index().expect("Failed to get index");
9916 index.add_path(path).expect("Failed to add file");
9917 index.write().expect("Failed to write index");
9918}
9919
9920#[track_caller]
9921fn git_remove_index(path: &Path, repo: &git2::Repository) {
9922 let mut index = repo.index().expect("Failed to get index");
9923 index.remove_path(path).expect("Failed to add file");
9924 index.write().expect("Failed to write index");
9925}
9926
9927#[track_caller]
9928fn git_commit(msg: &'static str, repo: &git2::Repository) {
9929 use git2::Signature;
9930
9931 let signature = Signature::now("test", "test@zed.dev").unwrap();
9932 let oid = repo.index().unwrap().write_tree().unwrap();
9933 let tree = repo.find_tree(oid).unwrap();
9934 if let Ok(head) = repo.head() {
9935 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9936
9937 let parent_commit = parent_obj.as_commit().unwrap();
9938
9939 repo.commit(
9940 Some("HEAD"),
9941 &signature,
9942 &signature,
9943 msg,
9944 &tree,
9945 &[parent_commit],
9946 )
9947 .expect("Failed to commit with parent");
9948 } else {
9949 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9950 .expect("Failed to commit");
9951 }
9952}
9953
// NOTE: `cfg(any())` is always false, so this helper is compiled out; it is
// kept around for ad-hoc use when debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9959
9960#[track_caller]
9961fn git_stash(repo: &mut git2::Repository) {
9962 use git2::Signature;
9963
9964 let signature = Signature::now("test", "test@zed.dev").unwrap();
9965 repo.stash_save(&signature, "N/A", None)
9966 .expect("Failed to stash");
9967}
9968
9969#[track_caller]
9970fn git_reset(offset: usize, repo: &git2::Repository) {
9971 let head = repo.head().expect("Couldn't get repo head");
9972 let object = head.peel(git2::ObjectType::Commit).unwrap();
9973 let commit = object.as_commit().unwrap();
9974 let new_head = commit
9975 .parents()
9976 .inspect(|parnet| {
9977 parnet.message();
9978 })
9979 .nth(offset)
9980 .expect("Not enough history");
9981 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9982 .expect("Could not reset");
9983}
9984
// NOTE: `cfg(any())` is always false, so this helper is compiled out; it is
// kept around for ad-hoc use when debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit (no checkout).
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste error: the message previously said "Failed to commit".
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9995
// NOTE: `cfg(any())` is always false, so this helper is compiled out; it is
// kept around for ad-hoc use when debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10002
// NOTE: `cfg(any())` is always false, so this helper is compiled out; it is
// kept around for ad-hoc use when debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    // Snapshot the full repository status as a path -> status-flags map.
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
10012
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two worktrees so we can check resolution picks the right one.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // File at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves with the full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}