1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
13 assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, TestAppContext, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettings, LanguageSettingsContent},
31 markdown_lang, rust_lang, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
149#[gpui::test]
150async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
151 init_test(cx);
152
153 let dir = TempTree::new(json!({
154 ".editorconfig": r#"
155 root = true
156 [*.rs]
157 indent_style = tab
158 indent_size = 3
159 end_of_line = lf
160 insert_final_newline = true
161 trim_trailing_whitespace = true
162 max_line_length = 120
163 [*.js]
164 tab_width = 10
165 max_line_length = off
166 "#,
167 ".zed": {
168 "settings.json": r#"{
169 "tab_size": 8,
170 "hard_tabs": false,
171 "ensure_final_newline_on_save": false,
172 "remove_trailing_whitespace_on_save": false,
173 "preferred_line_length": 64,
174 "soft_wrap": "editor_width",
175 }"#,
176 },
177 "a.rs": "fn a() {\n A\n}",
178 "b": {
179 ".editorconfig": r#"
180 [*.rs]
181 indent_size = 2
182 max_line_length = off,
183 "#,
184 "b.rs": "fn b() {\n B\n}",
185 },
186 "c.js": "def c\n C\nend",
187 "README.json": "tabs are better\n",
188 }));
189
190 let path = dir.path();
191 let fs = FakeFs::new(cx.executor());
192 fs.insert_tree_from_real_fs(path, path).await;
193 let project = Project::test(fs, [path], cx).await;
194
195 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
196 language_registry.add(js_lang());
197 language_registry.add(json_lang());
198 language_registry.add(rust_lang());
199
200 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
201
202 cx.executor().run_until_parked();
203
204 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
205 let buffer = project
206 .update(cx, |project, cx| {
207 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
208 })
209 .await
210 .unwrap();
211 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
212 };
213
214 let settings_a = settings_for("a.rs", cx).await;
215 let settings_b = settings_for("b/b.rs", cx).await;
216 let settings_c = settings_for("c.js", cx).await;
217 let settings_readme = settings_for("README.json", cx).await;
218 // .editorconfig overrides .zed/settings
219 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
220 assert_eq!(settings_a.hard_tabs, true);
221 assert_eq!(settings_a.ensure_final_newline_on_save, true);
222 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
223 assert_eq!(settings_a.preferred_line_length, 120);
224
225 // .editorconfig in b/ overrides .editorconfig in root
226 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
227
228 // "indent_size" is not set, so "tab_width" is used
229 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
230
231 // When max_line_length is "off", default to .zed/settings.json
232 assert_eq!(settings_b.preferred_line_length, 64);
233 assert_eq!(settings_c.preferred_line_length, 64);
234
235 // README.md should not be affected by .editorconfig's globe "*.rs"
236 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
237}
238
239#[gpui::test]
240async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
241 init_test(cx);
242
243 let fs = FakeFs::new(cx.executor());
244 fs.insert_tree(
245 path!("/grandparent"),
246 json!({
247 ".editorconfig": "[*]\nindent_size = 4\n",
248 "parent": {
249 ".editorconfig": "[*.rs]\nindent_size = 2\n",
250 "worktree": {
251 ".editorconfig": "[*.md]\nindent_size = 3\n",
252 "main.rs": "fn main() {}",
253 "README.md": "# README",
254 "other.txt": "other content",
255 }
256 }
257 }),
258 )
259 .await;
260
261 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
262
263 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
264 language_registry.add(rust_lang());
265 language_registry.add(markdown_lang());
266
267 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
268
269 cx.executor().run_until_parked();
270 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
271 let buffer = project
272 .update(cx, |project, cx| {
273 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
274 })
275 .await
276 .unwrap();
277 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
278 };
279
280 let settings_rs = settings_for("main.rs", cx).await;
281 let settings_md = settings_for("README.md", cx).await;
282 let settings_txt = settings_for("other.txt", cx).await;
283
284 // main.rs gets indent_size = 2 from parent's external .editorconfig
285 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
286
287 // README.md gets indent_size = 3 from internal worktree .editorconfig
288 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
289
290 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
291 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
292}
293
294#[gpui::test]
295async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
296 init_test(cx);
297
298 let fs = FakeFs::new(cx.executor());
299 fs.insert_tree(
300 path!("/parent"),
301 json!({
302 ".editorconfig": "[*]\nindent_size = 99\n",
303 "worktree": {
304 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
305 "file.rs": "fn main() {}",
306 }
307 }),
308 )
309 .await;
310
311 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
312
313 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
314 language_registry.add(rust_lang());
315
316 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
317
318 cx.executor().run_until_parked();
319
320 let buffer = project
321 .update(cx, |project, cx| {
322 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
323 })
324 .await
325 .unwrap();
326
327 cx.update(|cx| {
328 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
329
330 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
331 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
332 });
333}
334
335#[gpui::test]
336async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
337 init_test(cx);
338
339 let fs = FakeFs::new(cx.executor());
340 fs.insert_tree(
341 path!("/grandparent"),
342 json!({
343 ".editorconfig": "[*]\nindent_size = 99\n",
344 "parent": {
345 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
346 "worktree": {
347 "file.rs": "fn main() {}",
348 }
349 }
350 }),
351 )
352 .await;
353
354 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
355
356 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
357 language_registry.add(rust_lang());
358
359 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
360
361 cx.executor().run_until_parked();
362
363 let buffer = project
364 .update(cx, |project, cx| {
365 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
366 })
367 .await
368 .unwrap();
369
370 cx.update(|cx| {
371 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
372
373 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
374 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
375 });
376}
377
378#[gpui::test]
379async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
380 init_test(cx);
381
382 let fs = FakeFs::new(cx.executor());
383 fs.insert_tree(
384 path!("/parent"),
385 json!({
386 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
387 "worktree_a": {
388 "file.rs": "fn a() {}",
389 ".editorconfig": "[*]\ninsert_final_newline = true\n",
390 },
391 "worktree_b": {
392 "file.rs": "fn b() {}",
393 ".editorconfig": "[*]\ninsert_final_newline = false\n",
394 }
395 }),
396 )
397 .await;
398
399 let project = Project::test(
400 fs,
401 [
402 path!("/parent/worktree_a").as_ref(),
403 path!("/parent/worktree_b").as_ref(),
404 ],
405 cx,
406 )
407 .await;
408
409 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
410 language_registry.add(rust_lang());
411
412 cx.executor().run_until_parked();
413
414 let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
415 assert_eq!(worktrees.len(), 2);
416
417 for worktree in worktrees {
418 let buffer = project
419 .update(cx, |project, cx| {
420 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
421 })
422 .await
423 .unwrap();
424
425 cx.update(|cx| {
426 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
427
428 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
429 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
430 });
431 }
432}
433
434#[gpui::test]
435async fn test_external_editorconfig_not_loaded_without_internal_config(
436 cx: &mut gpui::TestAppContext,
437) {
438 init_test(cx);
439
440 let fs = FakeFs::new(cx.executor());
441 fs.insert_tree(
442 path!("/parent"),
443 json!({
444 ".editorconfig": "[*]\nindent_size = 99\n",
445 "worktree": {
446 "file.rs": "fn main() {}",
447 }
448 }),
449 )
450 .await;
451
452 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
453
454 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
455 language_registry.add(rust_lang());
456
457 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
458
459 cx.executor().run_until_parked();
460
461 let buffer = project
462 .update(cx, |project, cx| {
463 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
464 })
465 .await
466 .unwrap();
467
468 cx.update(|cx| {
469 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
470
471 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
472 // because without an internal .editorconfig, external configs are not loaded
473 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
474 });
475}
476
477#[gpui::test]
478async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
479 init_test(cx);
480
481 let fs = FakeFs::new(cx.executor());
482 fs.insert_tree(
483 path!("/parent"),
484 json!({
485 ".editorconfig": "[*]\nindent_size = 4\n",
486 "worktree": {
487 ".editorconfig": "[*]\n",
488 "file.rs": "fn main() {}",
489 }
490 }),
491 )
492 .await;
493
494 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
495
496 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
497 language_registry.add(rust_lang());
498
499 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
500
501 cx.executor().run_until_parked();
502
503 let buffer = project
504 .update(cx, |project, cx| {
505 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
506 })
507 .await
508 .unwrap();
509
510 cx.update(|cx| {
511 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
512
513 // Test initial settings: tab_size = 4 from parent's external .editorconfig
514 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
515 });
516
517 fs.atomic_write(
518 PathBuf::from(path!("/parent/.editorconfig")),
519 "[*]\nindent_size = 8\n".to_owned(),
520 )
521 .await
522 .unwrap();
523
524 cx.executor().run_until_parked();
525
526 let buffer = project
527 .update(cx, |project, cx| {
528 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
529 })
530 .await
531 .unwrap();
532
533 cx.update(|cx| {
534 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
535
536 // Test settings updated: tab_size = 8
537 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
538 });
539}
540
541#[gpui::test]
542async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
543 init_test(cx);
544
545 let fs = FakeFs::new(cx.executor());
546 fs.insert_tree(
547 path!("/parent"),
548 json!({
549 ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
550 "existing_worktree": {
551 ".editorconfig": "[*]\n",
552 "file.rs": "fn a() {}",
553 },
554 "new_worktree": {
555 ".editorconfig": "[*]\n",
556 "file.rs": "fn b() {}",
557 }
558 }),
559 )
560 .await;
561
562 let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;
563
564 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
565 language_registry.add(rust_lang());
566
567 cx.executor().run_until_parked();
568
569 let buffer = project
570 .update(cx, |project, cx| {
571 let id = project.worktrees(cx).next().unwrap().read(cx).id();
572 project.open_buffer((id, rel_path("file.rs")), cx)
573 })
574 .await
575 .unwrap();
576
577 cx.update(|cx| {
578 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();
579
580 // Test existing worktree has tab_size = 7
581 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
582 });
583
584 let (new_worktree, _) = project
585 .update(cx, |project, cx| {
586 project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
587 })
588 .await
589 .unwrap();
590
591 cx.executor().run_until_parked();
592
593 let buffer = project
594 .update(cx, |project, cx| {
595 project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
596 })
597 .await
598 .unwrap();
599
600 cx.update(|cx| {
601 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
602
603 // Verify new worktree also has tab_size = 7 from shared parent editorconfig
604 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
605 });
606}
607
608#[gpui::test]
609async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
610 init_test(cx);
611
612 let fs = FakeFs::new(cx.executor());
613 fs.insert_tree(
614 path!("/parent"),
615 json!({
616 ".editorconfig": "[*]\nindent_size = 6\n",
617 "worktree": {
618 ".editorconfig": "[*]\n",
619 "file.rs": "fn main() {}",
620 }
621 }),
622 )
623 .await;
624
625 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
626
627 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
628 language_registry.add(rust_lang());
629
630 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
631 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
632
633 cx.executor().run_until_parked();
634
635 cx.update(|cx| {
636 let store = cx.global::<SettingsStore>();
637 let (worktree_ids, external_paths, watcher_paths) =
638 store.editorconfig_store.read(cx).test_state();
639
640 // Test external config is loaded
641 assert!(worktree_ids.contains(&worktree_id));
642 assert!(!external_paths.is_empty());
643 assert!(!watcher_paths.is_empty());
644 });
645
646 project.update(cx, |project, cx| {
647 project.remove_worktree(worktree_id, cx);
648 });
649
650 cx.executor().run_until_parked();
651
652 cx.update(|cx| {
653 let store = cx.global::<SettingsStore>();
654 let (worktree_ids, external_paths, watcher_paths) =
655 store.editorconfig_store.read(cx).test_state();
656
657 // Test worktree state, external configs, and watchers all removed
658 assert!(!worktree_ids.contains(&worktree_id));
659 assert!(external_paths.is_empty());
660 assert!(watcher_paths.is_empty());
661 });
662}
663
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two sibling worktrees that both inherit from the same external
    // .editorconfig one directory above them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // NOTE(review): assumes `worktrees()` yields worktrees in the order they
    // were passed to `Project::test` above, so index 0 is worktree_a and
    // index 1 is worktree_b — confirm against the worktree store's ordering.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    // Drop one of the two worktrees that reference the shared external config.
    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
756
757#[gpui::test]
758async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
759 init_test(cx);
760 cx.update(|cx| {
761 GitHostingProviderRegistry::default_global(cx);
762 git_hosting_providers::init(cx);
763 });
764
765 let fs = FakeFs::new(cx.executor());
766 let str_path = path!("/dir");
767 let path = Path::new(str_path);
768
769 fs.insert_tree(
770 path!("/dir"),
771 json!({
772 ".zed": {
773 "settings.json": r#"{
774 "git_hosting_providers": [
775 {
776 "provider": "gitlab",
777 "base_url": "https://google.com",
778 "name": "foo"
779 }
780 ]
781 }"#
782 },
783 }),
784 )
785 .await;
786
787 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
788 let (_worktree, _) =
789 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
790 cx.executor().run_until_parked();
791
792 cx.update(|cx| {
793 let provider = GitHostingProviderRegistry::global(cx);
794 assert!(
795 provider
796 .list_hosting_providers()
797 .into_iter()
798 .any(|provider| provider.name() == "foo")
799 );
800 });
801
802 fs.atomic_write(
803 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
804 "{}".into(),
805 )
806 .await
807 .unwrap();
808
809 cx.run_until_parked();
810
811 cx.update(|cx| {
812 let provider = GitHostingProviderRegistry::global(cx);
813 assert!(
814 !provider
815 .list_hosting_providers()
816 .into_iter()
817 .any(|provider| provider.name() == "foo")
818 );
819 });
820}
821
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree with settings/tasks in the root `.zed` directory and a nested
    // `b/.zed` directory that provides its own settings and tasks for `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a context that only carries the active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies the task source backed by the root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    // Nested `b/.zed/settings.json` overrides the root `.zed/settings.json`
    // for buffers under `b/`.
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // NOTE(review): this expectation pins the ordering — the task from the
    // deeper `b/.zed` directory is listed before the root `.zed` one; the
    // ordering rule itself lives in the task inventory, not here.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as scheduled and register an extra task in the
    // global tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the previously-run root task is listed first, then the
    // other worktree task, then the newly added global task — again pinned by
    // this expectation rather than restated logic.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1024
1025#[gpui::test]
1026async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1027 init_test(cx);
1028 TaskStore::init(None);
1029
1030 let fs = FakeFs::new(cx.executor());
1031 fs.insert_tree(
1032 path!("/dir"),
1033 json!({
1034 ".zed": {
1035 "tasks.json": r#"[{
1036 "label": "test worktree root",
1037 "command": "echo $ZED_WORKTREE_ROOT"
1038 }]"#,
1039 },
1040 "a": {
1041 "a.rs": "fn a() {\n A\n}"
1042 },
1043 }),
1044 )
1045 .await;
1046
1047 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1048 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1049
1050 cx.executor().run_until_parked();
1051 let worktree_id = cx.update(|cx| {
1052 project.update(cx, |project, cx| {
1053 project.worktrees(cx).next().unwrap().read(cx).id()
1054 })
1055 });
1056
1057 let active_non_worktree_item_tasks = cx
1058 .update(|cx| {
1059 get_all_tasks(
1060 &project,
1061 Arc::new(TaskContexts {
1062 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1063 active_worktree_context: None,
1064 other_worktree_contexts: Vec::new(),
1065 lsp_task_sources: HashMap::default(),
1066 latest_selection: None,
1067 }),
1068 cx,
1069 )
1070 })
1071 .await;
1072 assert!(
1073 active_non_worktree_item_tasks.is_empty(),
1074 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1075 );
1076
1077 let active_worktree_tasks = cx
1078 .update(|cx| {
1079 get_all_tasks(
1080 &project,
1081 Arc::new(TaskContexts {
1082 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1083 active_worktree_context: Some((worktree_id, {
1084 let mut worktree_context = TaskContext::default();
1085 worktree_context
1086 .task_variables
1087 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1088 worktree_context
1089 })),
1090 other_worktree_contexts: Vec::new(),
1091 lsp_task_sources: HashMap::default(),
1092 latest_selection: None,
1093 }),
1094 cx,
1095 )
1096 })
1097 .await;
1098 assert_eq!(
1099 active_worktree_tasks
1100 .into_iter()
1101 .map(|(source_kind, task)| {
1102 let resolved = task.resolved;
1103 (source_kind, resolved.command.unwrap())
1104 })
1105 .collect::<Vec<_>>(),
1106 vec![(
1107 TaskSourceKind::Worktree {
1108 id: worktree_id,
1109 directory_in_worktree: rel_path(".zed").into(),
1110 id_base: "local worktree tasks from directory \".zed\"".into(),
1111 },
1112 "echo /dir".to_string(),
1113 )]
1114 );
1115}
1116
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that a single worktree containing two Python subprojects starts with
    // one shared language server, and that selecting a different toolchain for one
    // subproject causes a second, separate server instance to be spawned.

    // Minimal manifest provider that roots a subproject at the nearest ancestor
    // directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up to `depth` ancestors of `path`, returning the first directory
        // that contains a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own venv and `pyproject.toml`
    // manifest, inside a single worktree rooted at `/the-root`.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a should start the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no toolchain distinguishes the two subprojects yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery should be rooted at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet, so none should be reported as active.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b only.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1318
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: server startup on
    // buffer open, capability-based buffer configuration, change/save/close
    // notifications, renames across languages, and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // A fake Rust server advertising completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // A fake JSON server with a different completion trigger, to verify that
    // buffers are configured per-language.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server's next change notification is for test2.rs — the TOML edit
    // was never sent to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Within the same language, a rename is a close of the old URI...
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    // ...followed by an open of the new URI, with the version reset to 0.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify it is cleared when the
    // buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers should receive a shutdown request.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1721
1722#[gpui::test]
1723async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1724 init_test(cx);
1725
1726 let settings_json_contents = json!({
1727 "languages": {
1728 "Rust": {
1729 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1730 }
1731 },
1732 "lsp": {
1733 "my_fake_lsp": {
1734 "binary": {
1735 // file exists, so this is treated as a relative path
1736 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1737 }
1738 },
1739 "lsp_on_path": {
1740 "binary": {
1741 // file doesn't exist, so it will fall back on PATH env var
1742 "path": path!("lsp_on_path.exe").to_string(),
1743 }
1744 }
1745 },
1746 });
1747
1748 let fs = FakeFs::new(cx.executor());
1749 fs.insert_tree(
1750 path!("/the-root"),
1751 json!({
1752 ".zed": {
1753 "settings.json": settings_json_contents.to_string(),
1754 },
1755 ".relative_path": {
1756 "to": {
1757 "my_fake_lsp.exe": "",
1758 },
1759 },
1760 "src": {
1761 "main.rs": "",
1762 }
1763 }),
1764 )
1765 .await;
1766
1767 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1768 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1769 language_registry.add(rust_lang());
1770
1771 let mut my_fake_lsp = language_registry.register_fake_lsp(
1772 "Rust",
1773 FakeLspAdapter {
1774 name: "my_fake_lsp",
1775 ..Default::default()
1776 },
1777 );
1778 let mut lsp_on_path = language_registry.register_fake_lsp(
1779 "Rust",
1780 FakeLspAdapter {
1781 name: "lsp_on_path",
1782 ..Default::default()
1783 },
1784 );
1785
1786 cx.run_until_parked();
1787
1788 // Start the language server by opening a buffer with a compatible file extension.
1789 project
1790 .update(cx, |project, cx| {
1791 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1792 })
1793 .await
1794 .unwrap();
1795
1796 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1797 assert_eq!(
1798 lsp_path.to_string_lossy(),
1799 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1800 );
1801
1802 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1803 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1804}
1805
1806#[gpui::test]
1807async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1808 init_test(cx);
1809
1810 let settings_json_contents = json!({
1811 "languages": {
1812 "Rust": {
1813 "language_servers": ["tilde_lsp"]
1814 }
1815 },
1816 "lsp": {
1817 "tilde_lsp": {
1818 "binary": {
1819 "path": "~/.local/bin/rust-analyzer",
1820 }
1821 }
1822 },
1823 });
1824
1825 let fs = FakeFs::new(cx.executor());
1826 fs.insert_tree(
1827 path!("/root"),
1828 json!({
1829 ".zed": {
1830 "settings.json": settings_json_contents.to_string(),
1831 },
1832 "src": {
1833 "main.rs": "fn main() {}",
1834 }
1835 }),
1836 )
1837 .await;
1838
1839 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1840 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1841 language_registry.add(rust_lang());
1842
1843 let mut tilde_lsp = language_registry.register_fake_lsp(
1844 "Rust",
1845 FakeLspAdapter {
1846 name: "tilde_lsp",
1847 ..Default::default()
1848 },
1849 );
1850 cx.run_until_parked();
1851
1852 project
1853 .update(cx, |project, cx| {
1854 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1855 })
1856 .await
1857 .unwrap();
1858
1859 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1860 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1861 assert_eq!(
1862 lsp_path, expected_path,
1863 "Tilde path should expand to home directory"
1864 );
1865}
1866
1867#[gpui::test]
1868async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1869 init_test(cx);
1870
1871 let fs = FakeFs::new(cx.executor());
1872 fs.insert_tree(
1873 path!("/the-root"),
1874 json!({
1875 ".gitignore": "target\n",
1876 "Cargo.lock": "",
1877 "src": {
1878 "a.rs": "",
1879 "b.rs": "",
1880 },
1881 "target": {
1882 "x": {
1883 "out": {
1884 "x.rs": ""
1885 }
1886 },
1887 "y": {
1888 "out": {
1889 "y.rs": "",
1890 }
1891 },
1892 "z": {
1893 "out": {
1894 "z.rs": ""
1895 }
1896 }
1897 }
1898 }),
1899 )
1900 .await;
1901 fs.insert_tree(
1902 path!("/the-registry"),
1903 json!({
1904 "dep1": {
1905 "src": {
1906 "dep1.rs": "",
1907 }
1908 },
1909 "dep2": {
1910 "src": {
1911 "dep2.rs": "",
1912 }
1913 },
1914 }),
1915 )
1916 .await;
1917 fs.insert_tree(
1918 path!("/the/stdlib"),
1919 json!({
1920 "LICENSE": "",
1921 "src": {
1922 "string.rs": "",
1923 }
1924 }),
1925 )
1926 .await;
1927
1928 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1929 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1930 (project.languages().clone(), project.lsp_store())
1931 });
1932 language_registry.add(rust_lang());
1933 let mut fake_servers = language_registry.register_fake_lsp(
1934 "Rust",
1935 FakeLspAdapter {
1936 name: "the-language-server",
1937 ..Default::default()
1938 },
1939 );
1940
1941 cx.executor().run_until_parked();
1942
1943 // Start the language server by opening a buffer with a compatible file extension.
1944 project
1945 .update(cx, |project, cx| {
1946 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1947 })
1948 .await
1949 .unwrap();
1950
1951 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1952 project.update(cx, |project, cx| {
1953 let worktree = project.worktrees(cx).next().unwrap();
1954 assert_eq!(
1955 worktree
1956 .read(cx)
1957 .snapshot()
1958 .entries(true, 0)
1959 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1960 .collect::<Vec<_>>(),
1961 &[
1962 ("", false),
1963 (".gitignore", false),
1964 ("Cargo.lock", false),
1965 ("src", false),
1966 ("src/a.rs", false),
1967 ("src/b.rs", false),
1968 ("target", true),
1969 ]
1970 );
1971 });
1972
1973 let prev_read_dir_count = fs.read_dir_call_count();
1974
1975 let fake_server = fake_servers.next().await.unwrap();
1976 cx.executor().run_until_parked();
1977 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1978 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1979 id
1980 });
1981
1982 // Simulate jumping to a definition in a dependency outside of the worktree.
1983 let _out_of_worktree_buffer = project
1984 .update(cx, |project, cx| {
1985 project.open_local_buffer_via_lsp(
1986 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1987 server_id,
1988 cx,
1989 )
1990 })
1991 .await
1992 .unwrap();
1993
1994 // Keep track of the FS events reported to the language server.
1995 let file_changes = Arc::new(Mutex::new(Vec::new()));
1996 fake_server
1997 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1998 registrations: vec![lsp::Registration {
1999 id: Default::default(),
2000 method: "workspace/didChangeWatchedFiles".to_string(),
2001 register_options: serde_json::to_value(
2002 lsp::DidChangeWatchedFilesRegistrationOptions {
2003 watchers: vec![
2004 lsp::FileSystemWatcher {
2005 glob_pattern: lsp::GlobPattern::String(
2006 path!("/the-root/Cargo.toml").to_string(),
2007 ),
2008 kind: None,
2009 },
2010 lsp::FileSystemWatcher {
2011 glob_pattern: lsp::GlobPattern::String(
2012 path!("/the-root/src/*.{rs,c}").to_string(),
2013 ),
2014 kind: None,
2015 },
2016 lsp::FileSystemWatcher {
2017 glob_pattern: lsp::GlobPattern::String(
2018 path!("/the-root/target/y/**/*.rs").to_string(),
2019 ),
2020 kind: None,
2021 },
2022 lsp::FileSystemWatcher {
2023 glob_pattern: lsp::GlobPattern::String(
2024 path!("/the/stdlib/src/**/*.rs").to_string(),
2025 ),
2026 kind: None,
2027 },
2028 lsp::FileSystemWatcher {
2029 glob_pattern: lsp::GlobPattern::String(
2030 path!("**/Cargo.lock").to_string(),
2031 ),
2032 kind: None,
2033 },
2034 ],
2035 },
2036 )
2037 .ok(),
2038 }],
2039 })
2040 .await
2041 .into_response()
2042 .unwrap();
2043 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2044 let file_changes = file_changes.clone();
2045 move |params, _| {
2046 let mut file_changes = file_changes.lock();
2047 file_changes.extend(params.changes);
2048 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2049 }
2050 });
2051
2052 cx.executor().run_until_parked();
2053 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2054 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2055
2056 let mut new_watched_paths = fs.watched_paths();
2057 new_watched_paths.retain(|path| {
2058 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2059 });
2060 assert_eq!(
2061 &new_watched_paths,
2062 &[
2063 Path::new(path!("/the-root")),
2064 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2065 Path::new(path!("/the/stdlib/src"))
2066 ]
2067 );
2068
2069 // Now the language server has asked us to watch an ignored directory path,
2070 // so we recursively load it.
2071 project.update(cx, |project, cx| {
2072 let worktree = project.visible_worktrees(cx).next().unwrap();
2073 assert_eq!(
2074 worktree
2075 .read(cx)
2076 .snapshot()
2077 .entries(true, 0)
2078 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2079 .collect::<Vec<_>>(),
2080 &[
2081 ("", false),
2082 (".gitignore", false),
2083 ("Cargo.lock", false),
2084 ("src", false),
2085 ("src/a.rs", false),
2086 ("src/b.rs", false),
2087 ("target", true),
2088 ("target/x", true),
2089 ("target/y", true),
2090 ("target/y/out", true),
2091 ("target/y/out/y.rs", true),
2092 ("target/z", true),
2093 ]
2094 );
2095 });
2096
2097 // Perform some file system mutations, two of which match the watched patterns,
2098 // and one of which does not.
2099 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2100 .await
2101 .unwrap();
2102 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2103 .await
2104 .unwrap();
2105 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2106 .await
2107 .unwrap();
2108 fs.create_file(
2109 path!("/the-root/target/x/out/x2.rs").as_ref(),
2110 Default::default(),
2111 )
2112 .await
2113 .unwrap();
2114 fs.create_file(
2115 path!("/the-root/target/y/out/y2.rs").as_ref(),
2116 Default::default(),
2117 )
2118 .await
2119 .unwrap();
2120 fs.save(
2121 path!("/the-root/Cargo.lock").as_ref(),
2122 &"".into(),
2123 Default::default(),
2124 )
2125 .await
2126 .unwrap();
2127 fs.save(
2128 path!("/the-stdlib/LICENSE").as_ref(),
2129 &"".into(),
2130 Default::default(),
2131 )
2132 .await
2133 .unwrap();
2134 fs.save(
2135 path!("/the/stdlib/src/string.rs").as_ref(),
2136 &"".into(),
2137 Default::default(),
2138 )
2139 .await
2140 .unwrap();
2141
2142 // The language server receives events for the FS mutations that match its watch patterns.
2143 cx.executor().run_until_parked();
2144 assert_eq!(
2145 &*file_changes.lock(),
2146 &[
2147 lsp::FileEvent {
2148 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2149 typ: lsp::FileChangeType::CHANGED,
2150 },
2151 lsp::FileEvent {
2152 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2153 typ: lsp::FileChangeType::DELETED,
2154 },
2155 lsp::FileEvent {
2156 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2157 typ: lsp::FileChangeType::CREATED,
2158 },
2159 lsp::FileEvent {
2160 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2161 typ: lsp::FileChangeType::CREATED,
2162 },
2163 lsp::FileEvent {
2164 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2165 typ: lsp::FileChangeType::CHANGED,
2166 },
2167 ]
2168 );
2169}
2170
2171#[gpui::test]
2172async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2173 init_test(cx);
2174
2175 let fs = FakeFs::new(cx.executor());
2176 fs.insert_tree(
2177 path!("/dir"),
2178 json!({
2179 "a.rs": "let a = 1;",
2180 "b.rs": "let b = 2;"
2181 }),
2182 )
2183 .await;
2184
2185 let project = Project::test(
2186 fs,
2187 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2188 cx,
2189 )
2190 .await;
2191 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2192
2193 let buffer_a = project
2194 .update(cx, |project, cx| {
2195 project.open_local_buffer(path!("/dir/a.rs"), cx)
2196 })
2197 .await
2198 .unwrap();
2199 let buffer_b = project
2200 .update(cx, |project, cx| {
2201 project.open_local_buffer(path!("/dir/b.rs"), cx)
2202 })
2203 .await
2204 .unwrap();
2205
2206 lsp_store.update(cx, |lsp_store, cx| {
2207 lsp_store
2208 .update_diagnostics(
2209 LanguageServerId(0),
2210 lsp::PublishDiagnosticsParams {
2211 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2212 version: None,
2213 diagnostics: vec![lsp::Diagnostic {
2214 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2215 severity: Some(lsp::DiagnosticSeverity::ERROR),
2216 message: "error 1".to_string(),
2217 ..Default::default()
2218 }],
2219 },
2220 None,
2221 DiagnosticSourceKind::Pushed,
2222 &[],
2223 cx,
2224 )
2225 .unwrap();
2226 lsp_store
2227 .update_diagnostics(
2228 LanguageServerId(0),
2229 lsp::PublishDiagnosticsParams {
2230 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2231 version: None,
2232 diagnostics: vec![lsp::Diagnostic {
2233 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2234 severity: Some(DiagnosticSeverity::WARNING),
2235 message: "error 2".to_string(),
2236 ..Default::default()
2237 }],
2238 },
2239 None,
2240 DiagnosticSourceKind::Pushed,
2241 &[],
2242 cx,
2243 )
2244 .unwrap();
2245 });
2246
2247 buffer_a.update(cx, |buffer, _| {
2248 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2249 assert_eq!(
2250 chunks
2251 .iter()
2252 .map(|(s, d)| (s.as_str(), *d))
2253 .collect::<Vec<_>>(),
2254 &[
2255 ("let ", None),
2256 ("a", Some(DiagnosticSeverity::ERROR)),
2257 (" = 1;", None),
2258 ]
2259 );
2260 });
2261 buffer_b.update(cx, |buffer, _| {
2262 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2263 assert_eq!(
2264 chunks
2265 .iter()
2266 .map(|(s, d)| (s.as_str(), *d))
2267 .collect::<Vec<_>>(),
2268 &[
2269 ("let ", None),
2270 ("b", Some(DiagnosticSeverity::WARNING)),
2271 (" = 2;", None),
2272 ]
2273 );
2274 });
2275}
2276
2277#[gpui::test]
2278async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2279 init_test(cx);
2280
2281 let fs = FakeFs::new(cx.executor());
2282 fs.insert_tree(
2283 path!("/root"),
2284 json!({
2285 "dir": {
2286 ".git": {
2287 "HEAD": "ref: refs/heads/main",
2288 },
2289 ".gitignore": "b.rs",
2290 "a.rs": "let a = 1;",
2291 "b.rs": "let b = 2;",
2292 },
2293 "other.rs": "let b = c;"
2294 }),
2295 )
2296 .await;
2297
2298 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2299 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2300 let (worktree, _) = project
2301 .update(cx, |project, cx| {
2302 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2303 })
2304 .await
2305 .unwrap();
2306 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2307
2308 let (worktree, _) = project
2309 .update(cx, |project, cx| {
2310 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2311 })
2312 .await
2313 .unwrap();
2314 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2315
2316 let server_id = LanguageServerId(0);
2317 lsp_store.update(cx, |lsp_store, cx| {
2318 lsp_store
2319 .update_diagnostics(
2320 server_id,
2321 lsp::PublishDiagnosticsParams {
2322 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2323 version: None,
2324 diagnostics: vec![lsp::Diagnostic {
2325 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2326 severity: Some(lsp::DiagnosticSeverity::ERROR),
2327 message: "unused variable 'b'".to_string(),
2328 ..Default::default()
2329 }],
2330 },
2331 None,
2332 DiagnosticSourceKind::Pushed,
2333 &[],
2334 cx,
2335 )
2336 .unwrap();
2337 lsp_store
2338 .update_diagnostics(
2339 server_id,
2340 lsp::PublishDiagnosticsParams {
2341 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2342 version: None,
2343 diagnostics: vec![lsp::Diagnostic {
2344 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2345 severity: Some(lsp::DiagnosticSeverity::ERROR),
2346 message: "unknown variable 'c'".to_string(),
2347 ..Default::default()
2348 }],
2349 },
2350 None,
2351 DiagnosticSourceKind::Pushed,
2352 &[],
2353 cx,
2354 )
2355 .unwrap();
2356 });
2357
2358 let main_ignored_buffer = project
2359 .update(cx, |project, cx| {
2360 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2361 })
2362 .await
2363 .unwrap();
2364 main_ignored_buffer.update(cx, |buffer, _| {
2365 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2366 assert_eq!(
2367 chunks
2368 .iter()
2369 .map(|(s, d)| (s.as_str(), *d))
2370 .collect::<Vec<_>>(),
2371 &[
2372 ("let ", None),
2373 ("b", Some(DiagnosticSeverity::ERROR)),
2374 (" = 2;", None),
2375 ],
2376 "Gigitnored buffers should still get in-buffer diagnostics",
2377 );
2378 });
2379 let other_buffer = project
2380 .update(cx, |project, cx| {
2381 project.open_buffer((other_worktree_id, rel_path("")), cx)
2382 })
2383 .await
2384 .unwrap();
2385 other_buffer.update(cx, |buffer, _| {
2386 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2387 assert_eq!(
2388 chunks
2389 .iter()
2390 .map(|(s, d)| (s.as_str(), *d))
2391 .collect::<Vec<_>>(),
2392 &[
2393 ("let b = ", None),
2394 ("c", Some(DiagnosticSeverity::ERROR)),
2395 (";", None),
2396 ],
2397 "Buffers from hidden projects should still get in-buffer diagnostics"
2398 );
2399 });
2400
2401 project.update(cx, |project, cx| {
2402 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2403 assert_eq!(
2404 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2405 vec![(
2406 ProjectPath {
2407 worktree_id: main_worktree_id,
2408 path: rel_path("b.rs").into(),
2409 },
2410 server_id,
2411 DiagnosticSummary {
2412 error_count: 1,
2413 warning_count: 0,
2414 }
2415 )]
2416 );
2417 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2418 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2419 });
2420}
2421
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that work on the adapter's disk-based diagnostics progress
    // token produces DiskBasedDiagnosticsStarted/Finished events, that
    // published diagnostics emit DiagnosticsUpdated and land in the buffer,
    // and that re-publishing identical empty diagnostics emits no new event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter names its disk-based diagnostics token so that progress on
    // that token is interpreted as a disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress tokens may carry a suffix ("<token>/0") and should still match.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic mid-progress emits DiagnosticsUpdated for the
    // affected project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the tracked token finishes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second, identical empty publish is a no-op: no further event arrives.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2557
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics pass is still running does not leave the project stuck:
    // the replacement server is tracked under a new id, and finishing its
    // progress clears the "running disk-based diagnostics" set even though
    // the old server's pass never completed.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, and the replacement is added as id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2659
2660#[gpui::test]
2661async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2662 init_test(cx);
2663
2664 let fs = FakeFs::new(cx.executor());
2665 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2666
2667 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2668
2669 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2670 language_registry.add(rust_lang());
2671 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2672
2673 let (buffer, _) = project
2674 .update(cx, |project, cx| {
2675 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2676 })
2677 .await
2678 .unwrap();
2679
2680 // Publish diagnostics
2681 let fake_server = fake_servers.next().await.unwrap();
2682 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2683 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2684 version: None,
2685 diagnostics: vec![lsp::Diagnostic {
2686 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2687 severity: Some(lsp::DiagnosticSeverity::ERROR),
2688 message: "the message".to_string(),
2689 ..Default::default()
2690 }],
2691 });
2692
2693 cx.executor().run_until_parked();
2694 buffer.update(cx, |buffer, _| {
2695 assert_eq!(
2696 buffer
2697 .snapshot()
2698 .diagnostics_in_range::<_, usize>(0..1, false)
2699 .map(|entry| entry.diagnostic.message.clone())
2700 .collect::<Vec<_>>(),
2701 ["the message".to_string()]
2702 );
2703 });
2704 project.update(cx, |project, cx| {
2705 assert_eq!(
2706 project.diagnostic_summary(false, cx),
2707 DiagnosticSummary {
2708 error_count: 1,
2709 warning_count: 0,
2710 }
2711 );
2712 });
2713
2714 project.update(cx, |project, cx| {
2715 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2716 });
2717
2718 // The diagnostics are cleared.
2719 cx.executor().run_until_parked();
2720 buffer.update(cx, |buffer, _| {
2721 assert_eq!(
2722 buffer
2723 .snapshot()
2724 .diagnostics_in_range::<_, usize>(0..1, false)
2725 .map(|entry| entry.diagnostic.message.clone())
2726 .collect::<Vec<_>>(),
2727 Vec::<String>::new(),
2728 );
2729 });
2730 project.update(cx, |project, cx| {
2731 assert_eq!(
2732 project.diagnostic_summary(false, cx),
2733 DiagnosticSummary {
2734 error_count: 0,
2735 warning_count: 0,
2736 }
2737 );
2738 });
2739}
2740
2741#[gpui::test]
2742async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2743 init_test(cx);
2744
2745 let fs = FakeFs::new(cx.executor());
2746 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2747
2748 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2749 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2750
2751 language_registry.add(rust_lang());
2752 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2753
2754 let (buffer, _handle) = project
2755 .update(cx, |project, cx| {
2756 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2757 })
2758 .await
2759 .unwrap();
2760
2761 // Before restarting the server, report diagnostics with an unknown buffer version.
2762 let fake_server = fake_servers.next().await.unwrap();
2763 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2764 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2765 version: Some(10000),
2766 diagnostics: Vec::new(),
2767 });
2768 cx.executor().run_until_parked();
2769 project.update(cx, |project, cx| {
2770 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2771 });
2772
2773 let mut fake_server = fake_servers.next().await.unwrap();
2774 let notification = fake_server
2775 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2776 .await
2777 .text_document;
2778 assert_eq!(notification.version, 0);
2779}
2780
2781#[gpui::test]
2782async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2783 init_test(cx);
2784
2785 let progress_token = "the-progress-token";
2786
2787 let fs = FakeFs::new(cx.executor());
2788 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2789
2790 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2791
2792 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2793 language_registry.add(rust_lang());
2794 let mut fake_servers = language_registry.register_fake_lsp(
2795 "Rust",
2796 FakeLspAdapter {
2797 name: "the-language-server",
2798 disk_based_diagnostics_sources: vec!["disk".into()],
2799 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2800 ..Default::default()
2801 },
2802 );
2803
2804 let (buffer, _handle) = project
2805 .update(cx, |project, cx| {
2806 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2807 })
2808 .await
2809 .unwrap();
2810
2811 // Simulate diagnostics starting to update.
2812 let mut fake_server = fake_servers.next().await.unwrap();
2813 fake_server
2814 .start_progress_with(
2815 "another-token",
2816 lsp::WorkDoneProgressBegin {
2817 cancellable: Some(false),
2818 ..Default::default()
2819 },
2820 )
2821 .await;
2822 // Ensure progress notification is fully processed before starting the next one
2823 cx.executor().run_until_parked();
2824
2825 fake_server
2826 .start_progress_with(
2827 progress_token,
2828 lsp::WorkDoneProgressBegin {
2829 cancellable: Some(true),
2830 ..Default::default()
2831 },
2832 )
2833 .await;
2834 // Ensure progress notification is fully processed before cancelling
2835 cx.executor().run_until_parked();
2836
2837 project.update(cx, |project, cx| {
2838 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2839 });
2840 cx.executor().run_until_parked();
2841
2842 let cancel_notification = fake_server
2843 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2844 .await;
2845 assert_eq!(
2846 cancel_notification.token,
2847 NumberOrString::String(progress_token.into())
2848 );
2849}
2850
2851#[gpui::test]
2852async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
2853 init_test(cx);
2854
2855 let fs = FakeFs::new(cx.executor());
2856 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
2857 .await;
2858
2859 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2860 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2861
2862 let mut fake_rust_servers = language_registry.register_fake_lsp(
2863 "Rust",
2864 FakeLspAdapter {
2865 name: "rust-lsp",
2866 ..Default::default()
2867 },
2868 );
2869 let mut fake_js_servers = language_registry.register_fake_lsp(
2870 "JavaScript",
2871 FakeLspAdapter {
2872 name: "js-lsp",
2873 ..Default::default()
2874 },
2875 );
2876 language_registry.add(rust_lang());
2877 language_registry.add(js_lang());
2878
2879 let _rs_buffer = project
2880 .update(cx, |project, cx| {
2881 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2882 })
2883 .await
2884 .unwrap();
2885 let _js_buffer = project
2886 .update(cx, |project, cx| {
2887 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
2888 })
2889 .await
2890 .unwrap();
2891
2892 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
2893 assert_eq!(
2894 fake_rust_server_1
2895 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2896 .await
2897 .text_document
2898 .uri
2899 .as_str(),
2900 uri!("file:///dir/a.rs")
2901 );
2902
2903 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2904 assert_eq!(
2905 fake_js_server
2906 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2907 .await
2908 .text_document
2909 .uri
2910 .as_str(),
2911 uri!("file:///dir/b.js")
2912 );
2913
2914 // Disable Rust language server, ensuring only that server gets stopped.
2915 cx.update(|cx| {
2916 SettingsStore::update_global(cx, |settings, cx| {
2917 settings.update_user_settings(cx, |settings| {
2918 settings.languages_mut().insert(
2919 "Rust".into(),
2920 LanguageSettingsContent {
2921 enable_language_server: Some(false),
2922 ..Default::default()
2923 },
2924 );
2925 });
2926 })
2927 });
2928 fake_rust_server_1
2929 .receive_notification::<lsp::notification::Exit>()
2930 .await;
2931
2932 // Enable Rust and disable JavaScript language servers, ensuring that the
2933 // former gets started again and that the latter stops.
2934 cx.update(|cx| {
2935 SettingsStore::update_global(cx, |settings, cx| {
2936 settings.update_user_settings(cx, |settings| {
2937 settings.languages_mut().insert(
2938 "Rust".into(),
2939 LanguageSettingsContent {
2940 enable_language_server: Some(true),
2941 ..Default::default()
2942 },
2943 );
2944 settings.languages_mut().insert(
2945 "JavaScript".into(),
2946 LanguageSettingsContent {
2947 enable_language_server: Some(false),
2948 ..Default::default()
2949 },
2950 );
2951 });
2952 })
2953 });
2954 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2955 assert_eq!(
2956 fake_rust_server_2
2957 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2958 .await
2959 .text_document
2960 .uri
2961 .as_str(),
2962 uri!("file:///dir/a.rs")
2963 );
2964 fake_js_server
2965 .receive_notification::<lsp::notification::Exit>()
2966 .await;
2967}
2968
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an older buffer version are
    // translated through the edits made since that version: their ranges move
    // with the text, overlapping diagnostics render correctly, and stale
    // (out-of-order) publishes still land in the right positions.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (They were reported against the pre-edit version, so the two inserted
    // newlines shift each range down by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Each publish replaces the previous set; group ids keep increasing
    // across publishes rather than resetting.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // The ranges reflect the edits applied above even though the diagnostics
    // arrived in a different order than their positions in the buffer.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3260
3261#[gpui::test]
3262async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3263 init_test(cx);
3264
3265 let text = concat!(
3266 "let one = ;\n", //
3267 "let two = \n",
3268 "let three = 3;\n",
3269 );
3270
3271 let fs = FakeFs::new(cx.executor());
3272 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3273
3274 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3275 let buffer = project
3276 .update(cx, |project, cx| {
3277 project.open_local_buffer(path!("/dir/a.rs"), cx)
3278 })
3279 .await
3280 .unwrap();
3281
3282 project.update(cx, |project, cx| {
3283 project.lsp_store.update(cx, |lsp_store, cx| {
3284 lsp_store
3285 .update_diagnostic_entries(
3286 LanguageServerId(0),
3287 PathBuf::from(path!("/dir/a.rs")),
3288 None,
3289 None,
3290 vec![
3291 DiagnosticEntry {
3292 range: Unclipped(PointUtf16::new(0, 10))
3293 ..Unclipped(PointUtf16::new(0, 10)),
3294 diagnostic: Diagnostic {
3295 severity: DiagnosticSeverity::ERROR,
3296 message: "syntax error 1".to_string(),
3297 source_kind: DiagnosticSourceKind::Pushed,
3298 ..Diagnostic::default()
3299 },
3300 },
3301 DiagnosticEntry {
3302 range: Unclipped(PointUtf16::new(1, 10))
3303 ..Unclipped(PointUtf16::new(1, 10)),
3304 diagnostic: Diagnostic {
3305 severity: DiagnosticSeverity::ERROR,
3306 message: "syntax error 2".to_string(),
3307 source_kind: DiagnosticSourceKind::Pushed,
3308 ..Diagnostic::default()
3309 },
3310 },
3311 ],
3312 cx,
3313 )
3314 .unwrap();
3315 })
3316 });
3317
3318 // An empty range is extended forward to include the following character.
3319 // At the end of a line, an empty range is extended backward to include
3320 // the preceding character.
3321 buffer.update(cx, |buffer, _| {
3322 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3323 assert_eq!(
3324 chunks
3325 .iter()
3326 .map(|(s, d)| (s.as_str(), *d))
3327 .collect::<Vec<_>>(),
3328 &[
3329 ("let one = ", None),
3330 (";", Some(DiagnosticSeverity::ERROR)),
3331 ("\nlet two =", None),
3332 (" ", Some(DiagnosticSeverity::ERROR)),
3333 ("\nlet three = 3;\n", None)
3334 ]
3335 );
3336 });
3337}
3338
3339#[gpui::test]
3340async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3341 init_test(cx);
3342
3343 let fs = FakeFs::new(cx.executor());
3344 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3345 .await;
3346
3347 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3348 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
3349
3350 lsp_store.update(cx, |lsp_store, cx| {
3351 lsp_store
3352 .update_diagnostic_entries(
3353 LanguageServerId(0),
3354 Path::new(path!("/dir/a.rs")).to_owned(),
3355 None,
3356 None,
3357 vec![DiagnosticEntry {
3358 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3359 diagnostic: Diagnostic {
3360 severity: DiagnosticSeverity::ERROR,
3361 is_primary: true,
3362 message: "syntax error a1".to_string(),
3363 source_kind: DiagnosticSourceKind::Pushed,
3364 ..Diagnostic::default()
3365 },
3366 }],
3367 cx,
3368 )
3369 .unwrap();
3370 lsp_store
3371 .update_diagnostic_entries(
3372 LanguageServerId(1),
3373 Path::new(path!("/dir/a.rs")).to_owned(),
3374 None,
3375 None,
3376 vec![DiagnosticEntry {
3377 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3378 diagnostic: Diagnostic {
3379 severity: DiagnosticSeverity::ERROR,
3380 is_primary: true,
3381 message: "syntax error b1".to_string(),
3382 source_kind: DiagnosticSourceKind::Pushed,
3383 ..Diagnostic::default()
3384 },
3385 }],
3386 cx,
3387 )
3388 .unwrap();
3389
3390 assert_eq!(
3391 lsp_store.diagnostic_summary(false, cx),
3392 DiagnosticSummary {
3393 error_count: 2,
3394 warning_count: 0,
3395 }
3396 );
3397 });
3398}
3399
// Verifies that edits a language server computed against an *older* buffer
// version (identified by the DidOpen document version) are rebased onto the
// buffer's current contents by `edits_from_lsp`: the buffer is edited locally
// after the server's snapshot, and the returned edits must still land in the
// right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // all LSP edits below will be interpreted relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below refer to the *original* (pre-edit) text; passing
    // `lsp_document_version` tells `edits_from_lsp` to rebase them forward.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve both the server's changes and
    // the local edits made after the server's snapshot.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3554
// Verifies that `edits_from_lsp` minimizes a sprawling LSP edit set down to
// the smallest equivalent buffer edits: a server rewrite that re-sends most
// of the file should collapse into just the lines that actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file rewrite above should be reduced to two minimal edits:
        // the changed import and the removal of the now-duplicated second line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3665
3666#[gpui::test]
3667async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3668 cx: &mut gpui::TestAppContext,
3669) {
3670 init_test(cx);
3671
3672 let text = "Path()";
3673
3674 let fs = FakeFs::new(cx.executor());
3675 fs.insert_tree(
3676 path!("/dir"),
3677 json!({
3678 "a.rs": text
3679 }),
3680 )
3681 .await;
3682
3683 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3684 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3685 let buffer = project
3686 .update(cx, |project, cx| {
3687 project.open_local_buffer(path!("/dir/a.rs"), cx)
3688 })
3689 .await
3690 .unwrap();
3691
3692 // Simulate the language server sending us a pair of edits at the same location,
3693 // with an insertion following a replacement (which violates the LSP spec).
3694 let edits = lsp_store
3695 .update(cx, |lsp_store, cx| {
3696 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3697 &buffer,
3698 [
3699 lsp::TextEdit {
3700 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3701 new_text: "Path".into(),
3702 },
3703 lsp::TextEdit {
3704 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3705 new_text: "from path import Path\n\n\n".into(),
3706 },
3707 ],
3708 LanguageServerId(0),
3709 None,
3710 cx,
3711 )
3712 })
3713 .await
3714 .unwrap();
3715
3716 buffer.update(cx, |buffer, cx| {
3717 buffer.edit(edits, None, cx);
3718 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3719 });
3720}
3721
// Verifies that `edits_from_lsp` tolerates malformed server edits: unsorted
// edit lists, inverted ranges (start after end), and positions beyond the end
// of the file. The normalized result must match what a well-formed edit set
// would produce (same expectation as the adjacent-lines test above).
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column (8) comes after end column (4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the file and must be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal edit
        // pair produced for the well-formed equivalent.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3828
3829fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3830 buffer: &Buffer,
3831 range: Range<T>,
3832) -> Vec<(String, Option<DiagnosticSeverity>)> {
3833 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3834 for chunk in buffer.snapshot().chunks(range, true) {
3835 if chunks
3836 .last()
3837 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3838 {
3839 chunks.last_mut().unwrap().0.push_str(chunk.text);
3840 } else {
3841 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3842 }
3843 }
3844 chunks
3845}
3846
// Verifies go-to-definition across files: resolving a definition in a file
// outside the project's visible worktree creates a temporary *invisible*
// worktree for the target file, which is released once the returned
// definition (and hence its buffer handle) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as a (visible) worktree; `a.rs` exists on disk
    // but is not part of the project yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server answers the definition request with a location in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, `a.rs` is present as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3947
// Verifies that when a completion item carries an explicit `textEdit`, that
// edit's range and new text win over both `insertText` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Replace the final 3 characters ("fqn").
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion must use the textEdit's text and range.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4031
// Verifies completion resolution when the server supplies a default
// `edit_range` in `CompletionList.itemDefaults` instead of per-item
// `textEdit`s: the default range is used, with `text_edit_text` preferred
// over the item label when present.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the final 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used as the replacement text with the default range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // the fallback replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4169
// Verifies completion resolution when neither a per-item `textEdit` nor a
// default `edit_range` is supplied: the replacement text falls back to
// `insert_text` (then `label`), and the replace range is inferred from the
// word characters preceding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn", the word before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is just before the closing quote, i.e. after "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", the word before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4276
// Verifies that carriage returns in a completion's insert text ("\r" and
// "\r\n") are normalized to plain "\n" when the completion is resolved.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed "\r" and "\r\n" line endings in the inserted text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4345
// Verifies the full command-based code-action round trip: the action carries
// a command (no edits), so applying it resolves the action, executes the
// command on the server, receives the server's `workspace/applyEdit` request,
// and surfaces the resulting edits as a single undoable project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Actions must be resolved before their command is known.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    // Insert "X" at the start of a.ts.
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole command-driven edit is undoable as one transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4488
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a file to a path whose parent directories don't exist yet
    // should create the whole hierarchy; a second rename into an existing
    // directory should also succeed. File contents must be preserved in
    // both cases, and the old entries must disappear from the worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Grab the worktree and the stable id of the entry we're about to move.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First rename: target is three directories deep, none of which exist.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id at the file's new location for the next rename.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move the file up one level, into a directory that
    // already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4596
4597#[gpui::test(iterations = 10)]
4598async fn test_save_file(cx: &mut gpui::TestAppContext) {
4599 init_test(cx);
4600
4601 let fs = FakeFs::new(cx.executor());
4602 fs.insert_tree(
4603 path!("/dir"),
4604 json!({
4605 "file1": "the old contents",
4606 }),
4607 )
4608 .await;
4609
4610 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4611 let buffer = project
4612 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4613 .await
4614 .unwrap();
4615 buffer.update(cx, |buffer, cx| {
4616 assert_eq!(buffer.text(), "the old contents");
4617 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4618 });
4619
4620 project
4621 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4622 .await
4623 .unwrap();
4624
4625 let new_text = fs
4626 .load(Path::new(path!("/dir/file1")))
4627 .await
4628 .unwrap()
4629 .replace("\r\n", "\n");
4630 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4631}
4632
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Saving an untitled buffer under a name with a recognized extension
    // should spawn the matching language server and register the buffer
    // with it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake Rust language server so we can observe the
    // notifications it receives.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so no server is associated
    // with it even after registration.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the untitled buffer as a Rust file.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the running server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4712
4713#[gpui::test(iterations = 30)]
4714async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4715 init_test(cx);
4716
4717 let fs = FakeFs::new(cx.executor());
4718 fs.insert_tree(
4719 path!("/dir"),
4720 json!({
4721 "file1": "the original contents",
4722 }),
4723 )
4724 .await;
4725
4726 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4727 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4728 let buffer = project
4729 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4730 .await
4731 .unwrap();
4732
4733 // Change the buffer's file on disk, and then wait for the file change
4734 // to be detected by the worktree, so that the buffer starts reloading.
4735 fs.save(
4736 path!("/dir/file1").as_ref(),
4737 &"the first contents".into(),
4738 Default::default(),
4739 )
4740 .await
4741 .unwrap();
4742 worktree.next_event(cx).await;
4743
4744 // Change the buffer's file again. Depending on the random seed, the
4745 // previous file change may still be in progress.
4746 fs.save(
4747 path!("/dir/file1").as_ref(),
4748 &"the second contents".into(),
4749 Default::default(),
4750 )
4751 .await
4752 .unwrap();
4753 worktree.next_event(cx).await;
4754
4755 cx.executor().run_until_parked();
4756 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4757 buffer.read_with(cx, |buffer, _| {
4758 assert_eq!(buffer.text(), on_disk_text);
4759 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4760 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4761 });
4762}
4763
4764#[gpui::test(iterations = 30)]
4765async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4766 init_test(cx);
4767
4768 let fs = FakeFs::new(cx.executor());
4769 fs.insert_tree(
4770 path!("/dir"),
4771 json!({
4772 "file1": "the original contents",
4773 }),
4774 )
4775 .await;
4776
4777 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4778 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4779 let buffer = project
4780 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4781 .await
4782 .unwrap();
4783
4784 // Change the buffer's file on disk, and then wait for the file change
4785 // to be detected by the worktree, so that the buffer starts reloading.
4786 fs.save(
4787 path!("/dir/file1").as_ref(),
4788 &"the first contents".into(),
4789 Default::default(),
4790 )
4791 .await
4792 .unwrap();
4793 worktree.next_event(cx).await;
4794
4795 cx.executor()
4796 .spawn(cx.executor().simulate_random_delay())
4797 .await;
4798
4799 // Perform a noop edit, causing the buffer's version to increase.
4800 buffer.update(cx, |buffer, cx| {
4801 buffer.edit([(0..0, " ")], None, cx);
4802 buffer.undo(cx);
4803 });
4804
4805 cx.executor().run_until_parked();
4806 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4807 buffer.read_with(cx, |buffer, _| {
4808 let buffer_text = buffer.text();
4809 if buffer_text == on_disk_text {
4810 assert!(
4811 !buffer.is_dirty() && !buffer.has_conflict(),
4812 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4813 );
4814 }
4815 // If the file change occurred while the buffer was processing the first
4816 // change, the buffer will be in a conflicting state.
4817 else {
4818 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4819 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4820 }
4821 });
4822}
4823
4824#[gpui::test]
4825async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4826 init_test(cx);
4827
4828 let fs = FakeFs::new(cx.executor());
4829 fs.insert_tree(
4830 path!("/dir"),
4831 json!({
4832 "file1": "the old contents",
4833 }),
4834 )
4835 .await;
4836
4837 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4838 let buffer = project
4839 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4840 .await
4841 .unwrap();
4842 buffer.update(cx, |buffer, cx| {
4843 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4844 });
4845
4846 project
4847 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4848 .await
4849 .unwrap();
4850
4851 let new_text = fs
4852 .load(Path::new(path!("/dir/file1")))
4853 .await
4854 .unwrap()
4855 .replace("\r\n", "\n");
4856 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4857}
4858
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer to a path should write its contents to disk,
    // associate the buffer with the new file, re-detect its language from
    // the extension, and dedupe with subsequent opens of the same path.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Untitled buffers start out as plain text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension causes the language to be re-detected.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the just-saved path should return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4912
4913#[gpui::test]
4914async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4915 init_test(cx);
4916
4917 let fs = FakeFs::new(cx.executor());
4918 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4919
4920 fs.insert_tree(
4921 path!("/dir"),
4922 json!({
4923 "data_a.txt": "data about a"
4924 }),
4925 )
4926 .await;
4927
4928 let buffer = project
4929 .update(cx, |project, cx| {
4930 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4931 })
4932 .await
4933 .unwrap();
4934
4935 buffer.update(cx, |buffer, cx| {
4936 buffer.edit([(11..12, "b")], None, cx);
4937 });
4938
4939 // Save buffer's contents as a new file and confirm that the buffer's now
4940 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4941 // file associated with the buffer has now been updated to `data_b.txt`
4942 project
4943 .update(cx, |project, cx| {
4944 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4945 let new_path = ProjectPath {
4946 worktree_id,
4947 path: rel_path("data_b.txt").into(),
4948 };
4949
4950 project.save_buffer_as(buffer.clone(), new_path, cx)
4951 })
4952 .await
4953 .unwrap();
4954
4955 buffer.update(cx, |buffer, cx| {
4956 assert_eq!(
4957 buffer.file().unwrap().full_path(cx),
4958 Path::new("dir/data_b.txt")
4959 )
4960 });
4961
4962 // Open the original `data_a.txt` file, confirming that its contents are
4963 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4964 let original_buffer = project
4965 .update(cx, |project, cx| {
4966 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4967 })
4968 .await
4969 .unwrap();
4970
4971 original_buffer.update(cx, |buffer, cx| {
4972 assert_eq!(buffer.text(), "data about a");
4973 assert_eq!(
4974 buffer.file().unwrap().full_path(cx),
4975 Path::new("dir/data_a.txt")
4976 )
4977 });
4978}
4979
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses a real filesystem to verify that renames/deletions detected by a
    // rescan preserve entry ids and buffer-file associations, and that a
    // remote (replicated) worktree converges to the same state once the
    // buffered update stream is replayed onto it.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed on
    // the remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of parent directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the buffer whose file was
    // deleted keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5147
5148#[gpui::test(iterations = 10)]
5149async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5150 init_test(cx);
5151
5152 let fs = FakeFs::new(cx.executor());
5153 fs.insert_tree(
5154 path!("/dir"),
5155 json!({
5156 "a": {
5157 "file1": "",
5158 }
5159 }),
5160 )
5161 .await;
5162
5163 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5164 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5165 let tree_id = tree.update(cx, |tree, _| tree.id());
5166
5167 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5168 project.update(cx, |project, cx| {
5169 let tree = project.worktrees(cx).next().unwrap();
5170 tree.read(cx)
5171 .entry_for_path(rel_path(path))
5172 .unwrap_or_else(|| panic!("no entry for path {}", path))
5173 .id
5174 })
5175 };
5176
5177 let dir_id = id_for_path("a", cx);
5178 let file_id = id_for_path("a/file1", cx);
5179 let buffer = project
5180 .update(cx, |p, cx| {
5181 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5182 })
5183 .await
5184 .unwrap();
5185 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5186
5187 project
5188 .update(cx, |project, cx| {
5189 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5190 })
5191 .unwrap()
5192 .await
5193 .into_included()
5194 .unwrap();
5195 cx.executor().run_until_parked();
5196
5197 assert_eq!(id_for_path("b", cx), dir_id);
5198 assert_eq!(id_for_path("b/file1", cx), file_id);
5199 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5200}
5201
5202#[gpui::test]
5203async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5204 init_test(cx);
5205
5206 let fs = FakeFs::new(cx.executor());
5207 fs.insert_tree(
5208 "/dir",
5209 json!({
5210 "a.txt": "a-contents",
5211 "b.txt": "b-contents",
5212 }),
5213 )
5214 .await;
5215
5216 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5217
5218 // Spawn multiple tasks to open paths, repeating some paths.
5219 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5220 (
5221 p.open_local_buffer("/dir/a.txt", cx),
5222 p.open_local_buffer("/dir/b.txt", cx),
5223 p.open_local_buffer("/dir/a.txt", cx),
5224 )
5225 });
5226
5227 let buffer_a_1 = buffer_a_1.await.unwrap();
5228 let buffer_a_2 = buffer_a_2.await.unwrap();
5229 let buffer_b = buffer_b.await.unwrap();
5230 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5231 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5232
5233 // There is only one buffer per path.
5234 let buffer_a_id = buffer_a_1.entity_id();
5235 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5236
5237 // Open the same path again while it is still open.
5238 drop(buffer_a_1);
5239 let buffer_a_3 = project
5240 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5241 .await
5242 .unwrap();
5243
5244 // There's still only one buffer per path.
5245 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5246}
5247
5248#[gpui::test]
5249async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5250 init_test(cx);
5251
5252 let fs = FakeFs::new(cx.executor());
5253 fs.insert_tree(
5254 path!("/dir"),
5255 json!({
5256 "file1": "abc",
5257 "file2": "def",
5258 "file3": "ghi",
5259 }),
5260 )
5261 .await;
5262
5263 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5264
5265 let buffer1 = project
5266 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5267 .await
5268 .unwrap();
5269 let events = Arc::new(Mutex::new(Vec::new()));
5270
5271 // initially, the buffer isn't dirty.
5272 buffer1.update(cx, |buffer, cx| {
5273 cx.subscribe(&buffer1, {
5274 let events = events.clone();
5275 move |_, _, event, _| match event {
5276 BufferEvent::Operation { .. } => {}
5277 _ => events.lock().push(event.clone()),
5278 }
5279 })
5280 .detach();
5281
5282 assert!(!buffer.is_dirty());
5283 assert!(events.lock().is_empty());
5284
5285 buffer.edit([(1..2, "")], None, cx);
5286 });
5287
5288 // after the first edit, the buffer is dirty, and emits a dirtied event.
5289 buffer1.update(cx, |buffer, cx| {
5290 assert!(buffer.text() == "ac");
5291 assert!(buffer.is_dirty());
5292 assert_eq!(
5293 *events.lock(),
5294 &[
5295 language::BufferEvent::Edited,
5296 language::BufferEvent::DirtyChanged
5297 ]
5298 );
5299 events.lock().clear();
5300 buffer.did_save(
5301 buffer.version(),
5302 buffer.file().unwrap().disk_state().mtime(),
5303 cx,
5304 );
5305 });
5306
5307 // after saving, the buffer is not dirty, and emits a saved event.
5308 buffer1.update(cx, |buffer, cx| {
5309 assert!(!buffer.is_dirty());
5310 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5311 events.lock().clear();
5312
5313 buffer.edit([(1..1, "B")], None, cx);
5314 buffer.edit([(2..2, "D")], None, cx);
5315 });
5316
5317 // after editing again, the buffer is dirty, and emits another dirty event.
5318 buffer1.update(cx, |buffer, cx| {
5319 assert!(buffer.text() == "aBDc");
5320 assert!(buffer.is_dirty());
5321 assert_eq!(
5322 *events.lock(),
5323 &[
5324 language::BufferEvent::Edited,
5325 language::BufferEvent::DirtyChanged,
5326 language::BufferEvent::Edited,
5327 ],
5328 );
5329 events.lock().clear();
5330
5331 // After restoring the buffer to its previously-saved state,
5332 // the buffer is not considered dirty anymore.
5333 buffer.edit([(1..3, "")], None, cx);
5334 assert!(buffer.text() == "ac");
5335 assert!(!buffer.is_dirty());
5336 });
5337
5338 assert_eq!(
5339 *events.lock(),
5340 &[
5341 language::BufferEvent::Edited,
5342 language::BufferEvent::DirtyChanged
5343 ]
5344 );
5345
5346 // When a file is deleted, it is not considered dirty.
5347 let events = Arc::new(Mutex::new(Vec::new()));
5348 let buffer2 = project
5349 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5350 .await
5351 .unwrap();
5352 buffer2.update(cx, |_, cx| {
5353 cx.subscribe(&buffer2, {
5354 let events = events.clone();
5355 move |_, _, event, _| match event {
5356 BufferEvent::Operation { .. } => {}
5357 _ => events.lock().push(event.clone()),
5358 }
5359 })
5360 .detach();
5361 });
5362
5363 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5364 .await
5365 .unwrap();
5366 cx.executor().run_until_parked();
5367 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5368 assert_eq!(
5369 mem::take(&mut *events.lock()),
5370 &[language::BufferEvent::FileHandleChanged]
5371 );
5372
5373 // Buffer becomes dirty when edited.
5374 buffer2.update(cx, |buffer, cx| {
5375 buffer.edit([(2..3, "")], None, cx);
5376 assert_eq!(buffer.is_dirty(), true);
5377 });
5378 assert_eq!(
5379 mem::take(&mut *events.lock()),
5380 &[
5381 language::BufferEvent::Edited,
5382 language::BufferEvent::DirtyChanged
5383 ]
5384 );
5385
5386 // Buffer becomes clean again when all of its content is removed, because
5387 // the file was deleted.
5388 buffer2.update(cx, |buffer, cx| {
5389 buffer.edit([(0..2, "")], None, cx);
5390 assert_eq!(buffer.is_empty(), true);
5391 assert_eq!(buffer.is_dirty(), false);
5392 });
5393 assert_eq!(
5394 *events.lock(),
5395 &[
5396 language::BufferEvent::Edited,
5397 language::BufferEvent::DirtyChanged
5398 ]
5399 );
5400
5401 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5402 let events = Arc::new(Mutex::new(Vec::new()));
5403 let buffer3 = project
5404 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5405 .await
5406 .unwrap();
5407 buffer3.update(cx, |_, cx| {
5408 cx.subscribe(&buffer3, {
5409 let events = events.clone();
5410 move |_, _, event, _| match event {
5411 BufferEvent::Operation { .. } => {}
5412 _ => events.lock().push(event.clone()),
5413 }
5414 })
5415 .detach();
5416 });
5417
5418 buffer3.update(cx, |buffer, cx| {
5419 buffer.edit([(0..0, "x")], None, cx);
5420 });
5421 events.lock().clear();
5422 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5423 .await
5424 .unwrap();
5425 cx.executor().run_until_parked();
5426 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5427 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5428}
5429
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // An unmodified buffer silently reloads when its file changes on disk,
    // with anchors re-mapped through the diff between old and new contents.
    // A modified buffer does NOT reload; it is flagged as conflicted instead.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at the marked offsets so we can check how they move
    // after the reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should have tracked their marked positions through
        // the diff-based reload.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5512
5513#[gpui::test]
5514async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5515 init_test(cx);
5516
5517 let fs = FakeFs::new(cx.executor());
5518 fs.insert_tree(
5519 path!("/dir"),
5520 json!({
5521 "file1": "a\nb\nc\n",
5522 "file2": "one\r\ntwo\r\nthree\r\n",
5523 }),
5524 )
5525 .await;
5526
5527 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5528 let buffer1 = project
5529 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5530 .await
5531 .unwrap();
5532 let buffer2 = project
5533 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5534 .await
5535 .unwrap();
5536
5537 buffer1.update(cx, |buffer, _| {
5538 assert_eq!(buffer.text(), "a\nb\nc\n");
5539 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5540 });
5541 buffer2.update(cx, |buffer, _| {
5542 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5543 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5544 });
5545
5546 // Change a file's line endings on disk from unix to windows. The buffer's
5547 // state updates correctly.
5548 fs.save(
5549 path!("/dir/file1").as_ref(),
5550 &"aaa\nb\nc\n".into(),
5551 LineEnding::Windows,
5552 )
5553 .await
5554 .unwrap();
5555 cx.executor().run_until_parked();
5556 buffer1.update(cx, |buffer, _| {
5557 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5558 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5559 });
5560
5561 // Save a file with windows line endings. The file is written correctly.
5562 buffer2.update(cx, |buffer, cx| {
5563 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5564 });
5565 project
5566 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5567 .await
5568 .unwrap();
5569 assert_eq!(
5570 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5571 "one\r\ntwo\r\nthree\r\nfour\r\n",
5572 );
5573}
5574
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics linked through `related_information`
    // are grouped: diagnostics in the same group share a `group_id`, exactly one
    // entry per group is primary, and `diagnostic_group` returns a whole group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish five diagnostics: a WARNING ("error 1") with one related hint,
    // an ERROR ("error 2") with two related hints, and standalone HINT
    // diagnostics for each hint that point back at their primary diagnostic
    // via `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries in buffer order. The "error 2" cluster got group 0 and the
    // "error 1" cluster got group 1; each group has exactly one primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 alone: the "error 2" primary plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 alone: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5834
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a project entry sends `workspace/willRenameFiles`
    // to a language server that registered matching file-operation filters,
    // applies the workspace edit the server returns, and then notifies the
    // server with `workspace/didRenameFiles`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server watches `.rs` file renames and all folder renames.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the returned future is awaited only after the
    // willRenameFiles handler below has been installed and has responded.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; its content is
    // arbitrary — the test only checks that this exact edit is resolved.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed out by the willRenameFiles handler so we can
    // assert afterwards that the request was actually served.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5971
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename over LSP: `prepare_rename` returns the symbol's
    // range, and `perform_rename` applies the server's multi-file workspace
    // edit to the corresponding buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // the symbol range 6..9 and the project surfaces it unchanged.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server answers with edits touching both files,
    // including one.rs's definition and two.rs's two references.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers: the already-open one.rs and
    // a newly-opened two.rs.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6112
6113#[gpui::test]
6114async fn test_search(cx: &mut gpui::TestAppContext) {
6115 init_test(cx);
6116
6117 let fs = FakeFs::new(cx.executor());
6118 fs.insert_tree(
6119 path!("/dir"),
6120 json!({
6121 "one.rs": "const ONE: usize = 1;",
6122 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6123 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6124 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6125 }),
6126 )
6127 .await;
6128 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6129 assert_eq!(
6130 search(
6131 &project,
6132 SearchQuery::text(
6133 "TWO",
6134 false,
6135 true,
6136 false,
6137 Default::default(),
6138 Default::default(),
6139 false,
6140 None
6141 )
6142 .unwrap(),
6143 cx
6144 )
6145 .await
6146 .unwrap(),
6147 HashMap::from_iter([
6148 (path!("dir/two.rs").to_string(), vec![6..9]),
6149 (path!("dir/three.rs").to_string(), vec![37..40])
6150 ])
6151 );
6152
6153 let buffer_4 = project
6154 .update(cx, |project, cx| {
6155 project.open_local_buffer(path!("/dir/four.rs"), cx)
6156 })
6157 .await
6158 .unwrap();
6159 buffer_4.update(cx, |buffer, cx| {
6160 let text = "two::TWO";
6161 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6162 });
6163
6164 assert_eq!(
6165 search(
6166 &project,
6167 SearchQuery::text(
6168 "TWO",
6169 false,
6170 true,
6171 false,
6172 Default::default(),
6173 Default::default(),
6174 false,
6175 None,
6176 )
6177 .unwrap(),
6178 cx
6179 )
6180 .await
6181 .unwrap(),
6182 HashMap::from_iter([
6183 (path!("dir/two.rs").to_string(), vec![6..9]),
6184 (path!("dir/three.rs").to_string(), vec![37..40]),
6185 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6186 ])
6187 );
6188}
6189
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that `SearchQuery`'s inclusion `PathMatcher` restricts results
    // to matching files, and that non-matching inclusion globs are ignored
    // when combined with matching ones.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6313
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that `SearchQuery`'s exclusion `PathMatcher` filters matching
    // files out of the results, and that non-matching exclusion globs have
    // no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6437
6438#[gpui::test]
6439async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6440 init_test(cx);
6441
6442 let search_query = "file";
6443
6444 let fs = FakeFs::new(cx.executor());
6445 fs.insert_tree(
6446 path!("/dir"),
6447 json!({
6448 "one.rs": r#"// Rust file one"#,
6449 "one.ts": r#"// TypeScript file one"#,
6450 "two.rs": r#"// Rust file two"#,
6451 "two.ts": r#"// TypeScript file two"#,
6452 }),
6453 )
6454 .await;
6455
6456 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6457 let path_style = PathStyle::local();
6458 let _buffer = project.update(cx, |project, cx| {
6459 project.create_local_buffer("file", None, false, cx)
6460 });
6461
6462 assert_eq!(
6463 search(
6464 &project,
6465 SearchQuery::text(
6466 search_query,
6467 false,
6468 true,
6469 false,
6470 Default::default(),
6471 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6472 false,
6473 None,
6474 )
6475 .unwrap(),
6476 cx
6477 )
6478 .await
6479 .unwrap(),
6480 HashMap::from_iter([
6481 (path!("dir/one.rs").to_string(), vec![8..12]),
6482 (path!("dir/one.ts").to_string(), vec![14..18]),
6483 (path!("dir/two.rs").to_string(), vec![8..12]),
6484 (path!("dir/two.ts").to_string(), vec![14..18]),
6485 ]),
6486 "If no exclusions match, all files should be returned"
6487 );
6488
6489 assert_eq!(
6490 search(
6491 &project,
6492 SearchQuery::text(
6493 search_query,
6494 false,
6495 true,
6496 false,
6497 Default::default(),
6498 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6499 false,
6500 None,
6501 )
6502 .unwrap(),
6503 cx
6504 )
6505 .await
6506 .unwrap(),
6507 HashMap::from_iter([
6508 (path!("dir/one.ts").to_string(), vec![14..18]),
6509 (path!("dir/two.ts").to_string(), vec![14..18]),
6510 ]),
6511 "Rust exclusion search should give only TypeScript files"
6512 );
6513
6514 assert_eq!(
6515 search(
6516 &project,
6517 SearchQuery::text(
6518 search_query,
6519 false,
6520 true,
6521 false,
6522 Default::default(),
6523 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6524 false,
6525 None,
6526 )
6527 .unwrap(),
6528 cx
6529 )
6530 .await
6531 .unwrap(),
6532 HashMap::from_iter([
6533 (path!("dir/one.rs").to_string(), vec![8..12]),
6534 (path!("dir/two.rs").to_string(), vec![8..12]),
6535 ]),
6536 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6537 );
6538
6539 assert!(
6540 search(
6541 &project,
6542 SearchQuery::text(
6543 search_query,
6544 false,
6545 true,
6546 false,
6547 Default::default(),
6548 PathMatcher::new(
6549 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6550 PathStyle::local(),
6551 )
6552 .unwrap(),
6553 false,
6554 None,
6555 )
6556 .unwrap(),
6557 cx
6558 )
6559 .await
6560 .unwrap()
6561 .is_empty(),
6562 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6563 );
6564}
6565
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies how inclusion and exclusion `PathMatcher`s interact: when a
    // file matches both, the exclusion wins; disjoint sets behave as the
    // inclusion minus the exclusion.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
6679
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion globs can be anchored to a worktree root
    // (e.g. "worktree-a/*.rs") to scope results to one worktree, while
    // root-agnostic globs (e.g. "*.ts") match across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6778
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that gitignored directories are skipped by default, searched
    // when the "include ignored" flag is set, and still subject to
    // inclusion/exclusion matchers in that mode.
    // NOTE(review): a fresh project is created before each query, presumably
    // so previously-loaded ignored entries don't leak between cases — confirm.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored dirs ("target", "node_modules") are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with `include_ignored = true`: ignored files are searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored combined with inclusion and exclusion matchers.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6903
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive plain-text query for the lowercase word only.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive query stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        // Ranges are byte offsets: each Cyrillic letter is 2 bytes in UTF-8,
        // so the 6-letter word spans 12 bytes (e.g. 3..15 after "// ").
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive matching of non-ASCII text is implemented by falling
    // back to a regex query (asserted here), which also matches "ПРИВЕТ".
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is matched literally (only "ПРИВЕТ." in two.rs),
    // so the regex fallback must escape the query text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6986
6987#[gpui::test]
6988async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6989 init_test(cx);
6990
6991 let fs = FakeFs::new(cx.executor());
6992 fs.insert_tree(
6993 "/one/two",
6994 json!({
6995 "three": {
6996 "a.txt": "",
6997 "four": {}
6998 },
6999 "c.rs": ""
7000 }),
7001 )
7002 .await;
7003
7004 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7005 project
7006 .update(cx, |project, cx| {
7007 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7008 project.create_entry((id, rel_path("b..")), true, cx)
7009 })
7010 .await
7011 .unwrap()
7012 .into_included()
7013 .unwrap();
7014
7015 assert_eq!(
7016 fs.paths(true),
7017 vec![
7018 PathBuf::from(path!("/")),
7019 PathBuf::from(path!("/one")),
7020 PathBuf::from(path!("/one/two")),
7021 PathBuf::from(path!("/one/two/c.rs")),
7022 PathBuf::from(path!("/one/two/three")),
7023 PathBuf::from(path!("/one/two/three/a.txt")),
7024 PathBuf::from(path!("/one/two/three/b..")),
7025 PathBuf::from(path!("/one/two/three/four")),
7026 ]
7027 );
7028}
7029
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for the same language: the first three advertise hover
    // support, the last one does not (hover_provider: None).
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it comes up:
    // - TypeScript/Tailwind answer with real content,
    // - ESLint answers with None,
    // - the capability-less server panics if it is ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover first, then wait for every capable server to actually
    // receive the request — order matters, the handlers fire only once the
    // project fans the request out.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute results;
    // ESLint's None response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7184
7185#[gpui::test]
7186async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7187 init_test(cx);
7188
7189 let fs = FakeFs::new(cx.executor());
7190 fs.insert_tree(
7191 path!("/dir"),
7192 json!({
7193 "a.ts": "a",
7194 }),
7195 )
7196 .await;
7197
7198 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7199
7200 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7201 language_registry.add(typescript_lang());
7202 let mut fake_language_servers = language_registry.register_fake_lsp(
7203 "TypeScript",
7204 FakeLspAdapter {
7205 capabilities: lsp::ServerCapabilities {
7206 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7207 ..lsp::ServerCapabilities::default()
7208 },
7209 ..FakeLspAdapter::default()
7210 },
7211 );
7212
7213 let (buffer, _handle) = project
7214 .update(cx, |p, cx| {
7215 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7216 })
7217 .await
7218 .unwrap();
7219 cx.executor().run_until_parked();
7220
7221 let fake_server = fake_language_servers
7222 .next()
7223 .await
7224 .expect("failed to get the language server");
7225
7226 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7227 move |_, _| async move {
7228 Ok(Some(lsp::Hover {
7229 contents: lsp::HoverContents::Array(vec![
7230 lsp::MarkedString::String("".to_string()),
7231 lsp::MarkedString::String(" ".to_string()),
7232 lsp::MarkedString::String("\n\n\n".to_string()),
7233 ]),
7234 range: None,
7235 }))
7236 },
7237 );
7238
7239 let hover_task = project.update(cx, |project, cx| {
7240 project.hover(&buffer, Point::new(0, 0), cx)
7241 });
7242 let () = request_handled
7243 .next()
7244 .await
7245 .expect("All hover requests should have been triggered");
7246 assert_eq!(
7247 Vec::<String>::new(),
7248 hover_task
7249 .await
7250 .into_iter()
7251 .flatten()
7252 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7253 .sorted()
7254 .collect::<Vec<_>>(),
7255 "Empty hover parts should be ignored"
7256 );
7257}
7258
7259#[gpui::test]
7260async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7261 init_test(cx);
7262
7263 let fs = FakeFs::new(cx.executor());
7264 fs.insert_tree(
7265 path!("/dir"),
7266 json!({
7267 "a.ts": "a",
7268 }),
7269 )
7270 .await;
7271
7272 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7273
7274 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7275 language_registry.add(typescript_lang());
7276 let mut fake_language_servers = language_registry.register_fake_lsp(
7277 "TypeScript",
7278 FakeLspAdapter {
7279 capabilities: lsp::ServerCapabilities {
7280 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7281 ..lsp::ServerCapabilities::default()
7282 },
7283 ..FakeLspAdapter::default()
7284 },
7285 );
7286
7287 let (buffer, _handle) = project
7288 .update(cx, |p, cx| {
7289 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7290 })
7291 .await
7292 .unwrap();
7293 cx.executor().run_until_parked();
7294
7295 let fake_server = fake_language_servers
7296 .next()
7297 .await
7298 .expect("failed to get the language server");
7299
7300 let mut request_handled = fake_server
7301 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7302 Ok(Some(vec![
7303 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7304 title: "organize imports".to_string(),
7305 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7306 ..lsp::CodeAction::default()
7307 }),
7308 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7309 title: "fix code".to_string(),
7310 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7311 ..lsp::CodeAction::default()
7312 }),
7313 ]))
7314 });
7315
7316 let code_actions_task = project.update(cx, |project, cx| {
7317 project.code_actions(
7318 &buffer,
7319 0..buffer.read(cx).len(),
7320 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7321 cx,
7322 )
7323 });
7324
7325 let () = request_handled
7326 .next()
7327 .await
7328 .expect("The code action request should have been triggered");
7329
7330 let code_actions = code_actions_task.await.unwrap().unwrap();
7331 assert_eq!(code_actions.len(), 1);
7332 assert_eq!(
7333 code_actions[0].lsp_action.action_kind(),
7334 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7335 );
7336}
7337
7338#[gpui::test]
7339async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7340 init_test(cx);
7341
7342 let fs = FakeFs::new(cx.executor());
7343 fs.insert_tree(
7344 path!("/dir"),
7345 json!({
7346 "a.tsx": "a",
7347 }),
7348 )
7349 .await;
7350
7351 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7352
7353 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7354 language_registry.add(tsx_lang());
7355 let language_server_names = [
7356 "TypeScriptServer",
7357 "TailwindServer",
7358 "ESLintServer",
7359 "NoActionsCapabilitiesServer",
7360 ];
7361
7362 let mut language_server_rxs = [
7363 language_registry.register_fake_lsp(
7364 "tsx",
7365 FakeLspAdapter {
7366 name: language_server_names[0],
7367 capabilities: lsp::ServerCapabilities {
7368 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7369 ..lsp::ServerCapabilities::default()
7370 },
7371 ..FakeLspAdapter::default()
7372 },
7373 ),
7374 language_registry.register_fake_lsp(
7375 "tsx",
7376 FakeLspAdapter {
7377 name: language_server_names[1],
7378 capabilities: lsp::ServerCapabilities {
7379 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7380 ..lsp::ServerCapabilities::default()
7381 },
7382 ..FakeLspAdapter::default()
7383 },
7384 ),
7385 language_registry.register_fake_lsp(
7386 "tsx",
7387 FakeLspAdapter {
7388 name: language_server_names[2],
7389 capabilities: lsp::ServerCapabilities {
7390 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7391 ..lsp::ServerCapabilities::default()
7392 },
7393 ..FakeLspAdapter::default()
7394 },
7395 ),
7396 language_registry.register_fake_lsp(
7397 "tsx",
7398 FakeLspAdapter {
7399 name: language_server_names[3],
7400 capabilities: lsp::ServerCapabilities {
7401 code_action_provider: None,
7402 ..lsp::ServerCapabilities::default()
7403 },
7404 ..FakeLspAdapter::default()
7405 },
7406 ),
7407 ];
7408
7409 let (buffer, _handle) = project
7410 .update(cx, |p, cx| {
7411 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7412 })
7413 .await
7414 .unwrap();
7415 cx.executor().run_until_parked();
7416
7417 let mut servers_with_actions_requests = HashMap::default();
7418 for i in 0..language_server_names.len() {
7419 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7420 panic!(
7421 "Failed to get language server #{i} with name {}",
7422 &language_server_names[i]
7423 )
7424 });
7425 let new_server_name = new_server.server.name();
7426
7427 assert!(
7428 !servers_with_actions_requests.contains_key(&new_server_name),
7429 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7430 );
7431 match new_server_name.0.as_ref() {
7432 "TailwindServer" | "TypeScriptServer" => {
7433 servers_with_actions_requests.insert(
7434 new_server_name.clone(),
7435 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7436 move |_, _| {
7437 let name = new_server_name.clone();
7438 async move {
7439 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7440 lsp::CodeAction {
7441 title: format!("{name} code action"),
7442 ..lsp::CodeAction::default()
7443 },
7444 )]))
7445 }
7446 },
7447 ),
7448 );
7449 }
7450 "ESLintServer" => {
7451 servers_with_actions_requests.insert(
7452 new_server_name,
7453 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7454 |_, _| async move { Ok(None) },
7455 ),
7456 );
7457 }
7458 "NoActionsCapabilitiesServer" => {
7459 let _never_handled = new_server
7460 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7461 panic!(
7462 "Should not call for code actions server with no corresponding capabilities"
7463 )
7464 });
7465 }
7466 unexpected => panic!("Unexpected server name: {unexpected}"),
7467 }
7468 }
7469
7470 let code_actions_task = project.update(cx, |project, cx| {
7471 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7472 });
7473
7474 // cx.run_until_parked();
7475 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7476 |mut code_actions_request| async move {
7477 code_actions_request
7478 .next()
7479 .await
7480 .expect("All code actions requests should have been triggered")
7481 },
7482 ))
7483 .await;
7484 assert_eq!(
7485 vec!["TailwindServer code action", "TypeScriptServer code action"],
7486 code_actions_task
7487 .await
7488 .unwrap()
7489 .unwrap()
7490 .into_iter()
7491 .map(|code_action| code_action.lsp_action.title().to_owned())
7492 .sorted()
7493 .collect::<Vec<_>>(),
7494 "Should receive code actions responses from all related servers with hover capabilities"
7495 );
7496}
7497
7498#[gpui::test]
7499async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7500 init_test(cx);
7501
7502 let fs = FakeFs::new(cx.executor());
7503 fs.insert_tree(
7504 "/dir",
7505 json!({
7506 "a.rs": "let a = 1;",
7507 "b.rs": "let b = 2;",
7508 "c.rs": "let c = 2;",
7509 }),
7510 )
7511 .await;
7512
7513 let project = Project::test(
7514 fs,
7515 [
7516 "/dir/a.rs".as_ref(),
7517 "/dir/b.rs".as_ref(),
7518 "/dir/c.rs".as_ref(),
7519 ],
7520 cx,
7521 )
7522 .await;
7523
7524 // check the initial state and get the worktrees
7525 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7526 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7527 assert_eq!(worktrees.len(), 3);
7528
7529 let worktree_a = worktrees[0].read(cx);
7530 let worktree_b = worktrees[1].read(cx);
7531 let worktree_c = worktrees[2].read(cx);
7532
7533 // check they start in the right order
7534 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7535 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7536 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7537
7538 (
7539 worktrees[0].clone(),
7540 worktrees[1].clone(),
7541 worktrees[2].clone(),
7542 )
7543 });
7544
7545 // move first worktree to after the second
7546 // [a, b, c] -> [b, a, c]
7547 project
7548 .update(cx, |project, cx| {
7549 let first = worktree_a.read(cx);
7550 let second = worktree_b.read(cx);
7551 project.move_worktree(first.id(), second.id(), cx)
7552 })
7553 .expect("moving first after second");
7554
7555 // check the state after moving
7556 project.update(cx, |project, cx| {
7557 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7558 assert_eq!(worktrees.len(), 3);
7559
7560 let first = worktrees[0].read(cx);
7561 let second = worktrees[1].read(cx);
7562 let third = worktrees[2].read(cx);
7563
7564 // check they are now in the right order
7565 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7566 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7567 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7568 });
7569
7570 // move the second worktree to before the first
7571 // [b, a, c] -> [a, b, c]
7572 project
7573 .update(cx, |project, cx| {
7574 let second = worktree_a.read(cx);
7575 let first = worktree_b.read(cx);
7576 project.move_worktree(first.id(), second.id(), cx)
7577 })
7578 .expect("moving second before first");
7579
7580 // check the state after moving
7581 project.update(cx, |project, cx| {
7582 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7583 assert_eq!(worktrees.len(), 3);
7584
7585 let first = worktrees[0].read(cx);
7586 let second = worktrees[1].read(cx);
7587 let third = worktrees[2].read(cx);
7588
7589 // check they are now in the right order
7590 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7591 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7592 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7593 });
7594
7595 // move the second worktree to after the third
7596 // [a, b, c] -> [a, c, b]
7597 project
7598 .update(cx, |project, cx| {
7599 let second = worktree_b.read(cx);
7600 let third = worktree_c.read(cx);
7601 project.move_worktree(second.id(), third.id(), cx)
7602 })
7603 .expect("moving second after third");
7604
7605 // check the state after moving
7606 project.update(cx, |project, cx| {
7607 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7608 assert_eq!(worktrees.len(), 3);
7609
7610 let first = worktrees[0].read(cx);
7611 let second = worktrees[1].read(cx);
7612 let third = worktrees[2].read(cx);
7613
7614 // check they are now in the right order
7615 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7616 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7617 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7618 });
7619
7620 // move the third worktree to before the second
7621 // [a, c, b] -> [a, b, c]
7622 project
7623 .update(cx, |project, cx| {
7624 let third = worktree_c.read(cx);
7625 let second = worktree_b.read(cx);
7626 project.move_worktree(third.id(), second.id(), cx)
7627 })
7628 .expect("moving third before second");
7629
7630 // check the state after moving
7631 project.update(cx, |project, cx| {
7632 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7633 assert_eq!(worktrees.len(), 3);
7634
7635 let first = worktrees[0].read(cx);
7636 let second = worktrees[1].read(cx);
7637 let third = worktrees[2].read(cx);
7638
7639 // check they are now in the right order
7640 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7641 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7642 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7643 });
7644
7645 // move the first worktree to after the third
7646 // [a, b, c] -> [b, c, a]
7647 project
7648 .update(cx, |project, cx| {
7649 let first = worktree_a.read(cx);
7650 let third = worktree_c.read(cx);
7651 project.move_worktree(first.id(), third.id(), cx)
7652 })
7653 .expect("moving first after third");
7654
7655 // check the state after moving
7656 project.update(cx, |project, cx| {
7657 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7658 assert_eq!(worktrees.len(), 3);
7659
7660 let first = worktrees[0].read(cx);
7661 let second = worktrees[1].read(cx);
7662 let third = worktrees[2].read(cx);
7663
7664 // check they are now in the right order
7665 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7666 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7667 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7668 });
7669
7670 // move the third worktree to before the first
7671 // [b, c, a] -> [a, b, c]
7672 project
7673 .update(cx, |project, cx| {
7674 let third = worktree_a.read(cx);
7675 let first = worktree_b.read(cx);
7676 project.move_worktree(third.id(), first.id(), cx)
7677 })
7678 .expect("moving third before first");
7679
7680 // check the state after moving
7681 project.update(cx, |project, cx| {
7682 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7683 assert_eq!(worktrees.len(), 3);
7684
7685 let first = worktrees[0].read(cx);
7686 let second = worktrees[1].read(cx);
7687 let third = worktrees[2].read(cx);
7688
7689 // check they are now in the right order
7690 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7691 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7692 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7693 });
7694}
7695
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line, one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // An unstaged diff compares the buffer against the index contents.
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recompute before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Hunk tuples are (buffer row range, base text, buffer text, status).
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Stage part of the change: the index now contains the comment and an
    // empty main, so only the println line remains unstaged.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7789
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    // HEAD lacks the comment and has the "hello" println; the index has the
    // "goodbye" println staged; the working copy adds the comment on top.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // "src/deletion.rs" exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    // An uncommitted diff compares the buffer against HEAD, with secondary
    // (staged/unstaged) status per hunk.
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (for highlighting).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The added comment is not in the index -> HasSecondaryHunk; the
        // println change is already staged -> modified_none.
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The file is still in the index, so the deletion is unstaged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (drop it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Same deletion hunk, but its secondary (unstaged) status is cleared.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7973
// Exercises staging/unstaging of individual diff hunks in an uncommitted diff:
// hunks first appear optimistically pending (`SecondaryHunkRemovalPending`),
// then settle once the index write completes; a failed index write rolls the
// hunk back to unstaged; and two separate staging operations can be in flight
// at once. Also asserts on the exact `BufferDiffEvent`s emitted along the way.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both contain this text; the working copy below deletes
    // "zero" and modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so the emitted sequence can be asserted on.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8323
// Verifies that hunk staging stays consistent when FS events are delivered
// late: staging a second hunk before the first hunk's index-write event
// arrives must not lose either hunk's staged state. Events are paused with
// `FakeFs::pause_events` and released one at a time via `flush_events`.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: a deletion plus two modifications,
    // yielding three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8517
// Randomized staging test: repeatedly stages/unstages random hunks (with
// random yields in between), tracks the expected per-hunk secondary status in
// a shadow copy, then checks the diff's final state matches the model after
// everything settles. `OPERATIONS` env var overrides the operation count.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the working copy, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model: its `secondary_status` fields are mutated
    // below to track what each hunk's status should eventually become.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let in-flight index writes make partial progress between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending states resolve to their final values.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8637
// Verifies that uncommitted diffs work when the project root is a single file
// (`/dir/src/main.rs`) rather than a directory containing the repository.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the only hunk is the working-copy modification.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Note: the worktree root is the file itself, not its parent directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
8711
// TODO: Should we test this on Windows also?
// Regression test against a real git repository: staging a hunk of an
// executable (mode 0755) file must not rewrite its mode to 0644 in the index.
// Verified by shelling out to `git diff --staged` and `git ls-files -s`.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + real git below, so blocking on the executor is expected.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` as executable, then modify it in the working copy.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified executable file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8796
// Verifies mapping from project paths to (repository, repo-relative path)
// with nested repositories: files under `dir1/deps/dep1` resolve to the inner
// repo, files under `dir1` to the outer one, and files outside any repo to
// `None`. Also checks that removing a `.git` directory drops the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (work dir, repo path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's `.git` leaves `dir1/src/b.txt` unmapped.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8886
// Verifies handling of a git repository rooted at the user's home directory:
// a project opened on a subdirectory of home does NOT treat the home repo as
// containing its files, but a project opened on home itself does.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: project rooted at ~/project — the home repo is ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: project rooted at ~ itself — the home repo applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8944
// Verifies the repository's cached status against a real git repository
// across a sequence of changes: initial modified/untracked/deleted entries,
// a new working-copy modification, a commit that clears statuses, and
// deletions of tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so blocking on the executor is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit everything, then delete one tracked and one untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9074
// Verifies status post-processing rules: a nested git repository (`sub`) is
// excluded from the outer repo's statuses, and a file deleted in the index
// but present in HEAD and the working copy reports a combined `DA` status.
// NOTE(review): currently `#[ignore]`d — reason not recorded here.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so blocking on the executor is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9138
9139#[track_caller]
9140/// We merge lhs into rhs.
9141fn merge_pending_ops_snapshots(
9142 source: Vec<pending_op::PendingOps>,
9143 mut target: Vec<pending_op::PendingOps>,
9144) -> Vec<pending_op::PendingOps> {
9145 for s_ops in source {
9146 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9147 if ops.repo_path == s_ops.repo_path {
9148 Some(idx)
9149 } else {
9150 None
9151 }
9152 }) {
9153 let t_ops = &mut target[idx];
9154 for s_op in s_ops.ops {
9155 if let Some(op_idx) = t_ops
9156 .ops
9157 .iter()
9158 .zip(0..)
9159 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9160 {
9161 let t_op = &mut t_ops.ops[op_idx];
9162 match (s_op.job_status, t_op.job_status) {
9163 (pending_op::JobStatus::Running, _) => {}
9164 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9165 (s_st, t_st) if s_st == t_st => {}
9166 _ => unreachable!(),
9167 }
9168 } else {
9169 t_ops.ops.push(s_op);
9170 }
9171 }
9172 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9173 } else {
9174 target.push(s_ops);
9175 }
9176 }
9177 target
9178}
9179
/// Stages and unstages a single untracked file five times in a row, asserting
/// that each operation is recorded as a pending op with a sequentially
/// increasing id that transitions from `Running` to `Finished`, and that the
/// final cached git status reflects the last (staged) state.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the complete op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids appear to be assigned sequentially starting at 1; this counter
    // tracks the id we expect for the next stage/unstage operation.
    let mut id = 1u16;

    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // While the task is still in flight, the newest op for this path
            // should be reported as `Running`.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After the task resolves, the same op should be marked `Finished`.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history should contain all five ops, in order, each
    // finished with the status (staged/unstaged) it was issued with.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation staged the file, so its index status is now `Added`.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9340
/// Issues two overlapping stage operations for the same file: the first is
/// detached and the second is awaited. Asserts that the superseded first op
/// ends up `Skipped` while the second finishes, and that the file is staged.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the complete op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, left running while a second one arrives.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path; awaited with a timeout so the
    // test fails fast if the operation never completes.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first op was superseded by the second and should be `Skipped`.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file should end up staged (index status `Added`).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9446
/// Stages one file individually, then runs `stage_all` followed by
/// `unstage_all` over a repo with two untracked files, asserting the pending
/// op history recorded for each path and the final (untracked) statuses.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the complete op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt records only two ops (stage, then unstage) — presumably
    // `stage_all` does not re-issue an op for the already-staged file.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After `unstage_all`, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9575
/// Opens a worktree rooted at a subfolder of a git repository and verifies
/// that the repository is discovered above the worktree root and that file
/// statuses are keyed by repository-relative paths.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, which lies *above* the
    // worktree root opened below.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the project on the nested subfolder rather than the repo root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the simulated statuses should clear them in the repository too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9655
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (which never matches) until the flakiness is resolved.
//
// Exercises a real git repository: cherry-picking a conflicting commit should
// surface the conflicted path in `merge_conflicts`, and completing the
// cherry-pick manually should clear it again.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting commit on a side branch, then cherry-pick it onto
    // main where the same file was changed differently.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9738
/// Verifies that rewriting `.gitignore` updates both the ignored flag on
/// worktree entries and the git status of the newly (un)ignored files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt is tracked and staged as `Added`.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9806
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Renames a repository's work directory on disk and verifies that the
/// repository model follows the rename: `work_directory_abs_path` updates and
/// per-file statuses are preserved under the new location.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Baseline: statuses observed at the original work directory.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // After the rename, the same statuses should be reported under the new
    // work directory path.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9888
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// End-to-end test of git status tracking against a real repository: initial
/// scan, working-copy edits, commits, resets, stashes, gitignore changes,
/// file/directory deletion, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files no longer have a status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file and a directory, and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the parent directory and confirm the status follows the file
    // to its new repo-relative path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10113
/// Verifies that changes confined to ignored directories do not produce
/// repository update events, while tracked-but-ignored parents still receive
/// worktree entry updates. Currently `#[ignore]`d — TODO confirm why; likely
/// flakiness similar to the neighboring disabled tests.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository update events and worktree entry updates separately
    // so each stream can be asserted on its own.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-harness noise, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces those entries to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate a build tool churning files inside the ignored directory:
    // create a nested dir, write a temp file, then remove the dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10275
10276// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10277// to different timings/ordering of events.
10278#[ignore]
10279#[gpui::test]
10280async fn test_odd_events_for_ignored_dirs(
10281 executor: BackgroundExecutor,
10282 cx: &mut gpui::TestAppContext,
10283) {
10284 init_test(cx);
10285 let fs = FakeFs::new(executor);
10286 fs.insert_tree(
10287 path!("/root"),
10288 json!({
10289 ".git": {},
10290 ".gitignore": "**/target/",
10291 "src": {
10292 "main.rs": "fn main() {}",
10293 },
10294 "target": {
10295 "debug": {
10296 "foo.txt": "foo",
10297 "deps": {}
10298 }
10299 }
10300 }),
10301 )
10302 .await;
10303 fs.set_head_and_index_for_repo(
10304 path!("/root/.git").as_ref(),
10305 &[
10306 (".gitignore", "**/target/".into()),
10307 ("src/main.rs", "fn main() {}".into()),
10308 ],
10309 );
10310
10311 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10312 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10313 let project_events = Arc::new(Mutex::new(Vec::new()));
10314 project.update(cx, |project, cx| {
10315 let repository_updates = repository_updates.clone();
10316 cx.subscribe(project.git_store(), move |_, _, e, _| {
10317 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10318 repository_updates.lock().push(e.clone());
10319 }
10320 })
10321 .detach();
10322 let project_events = project_events.clone();
10323 cx.subscribe_self(move |_, e, _| {
10324 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10325 project_events.lock().extend(
10326 updates
10327 .iter()
10328 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10329 .filter(|(path, _)| path != "fs-event-sentinel"),
10330 );
10331 }
10332 })
10333 .detach();
10334 });
10335
10336 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10337 tree.update(cx, |tree, cx| {
10338 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10339 })
10340 .await
10341 .unwrap();
10342 tree.flush_fs_events(cx).await;
10343 project
10344 .update(cx, |project, cx| project.git_scans_complete(cx))
10345 .await;
10346 cx.run_until_parked();
10347 tree.update(cx, |tree, _| {
10348 assert_eq!(
10349 tree.entries(true, 0)
10350 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10351 .collect::<Vec<_>>(),
10352 vec![
10353 (rel_path(""), false),
10354 (rel_path(".gitignore"), false),
10355 (rel_path("src"), false),
10356 (rel_path("src/main.rs"), false),
10357 (rel_path("target"), true),
10358 (rel_path("target/debug"), true),
10359 (rel_path("target/debug/deps"), true),
10360 (rel_path("target/debug/foo.txt"), true),
10361 ]
10362 );
10363 });
10364
10365 assert_eq!(
10366 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10367 vec![
10368 RepositoryEvent::MergeHeadsChanged,
10369 RepositoryEvent::BranchChanged,
10370 RepositoryEvent::StatusesChanged,
10371 RepositoryEvent::StatusesChanged,
10372 ],
10373 "Initial worktree scan should produce a repo update event"
10374 );
10375 assert_eq!(
10376 project_events.lock().drain(..).collect::<Vec<_>>(),
10377 vec![
10378 ("target".to_string(), PathChange::Loaded),
10379 ("target/debug".to_string(), PathChange::Loaded),
10380 ("target/debug/deps".to_string(), PathChange::Loaded),
10381 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10382 ],
10383 "All non-ignored entries and all opened firs should be getting a project event",
10384 );
10385
10386 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10387 // This may happen multiple times during a single flycheck, but once is enough for testing.
10388 fs.emit_fs_event("/root/target/debug/deps", None);
10389 tree.flush_fs_events(cx).await;
10390 project
10391 .update(cx, |project, cx| project.git_scans_complete(cx))
10392 .await;
10393 cx.executor().run_until_parked();
10394
10395 assert_eq!(
10396 repository_updates
10397 .lock()
10398 .iter()
10399 .cloned()
10400 .collect::<Vec<_>>(),
10401 Vec::new(),
10402 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10403 );
10404 assert_eq!(
10405 project_events.lock().as_slice(),
10406 Vec::new(),
10407 "No further project events should happen, as only ignored dirs received FS events",
10408 );
10409}
10410
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that only repositories belonging to *visible* worktrees are surfaced:
    // adding an invisible (single-file) worktree whose ancestor contains a `.git`
    // directory must not cause that ancestor repository to appear.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo rooted at the visible worktree should be discovered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `/root/dir1/b.txt` as an *invisible* worktree (visible: false); its
    // containing repo at `/root/dir1` must not be picked up.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list should be unchanged.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10472
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies git status and ignore-state tracking across rescans: files ignored by
    // an ancestor `.gitignore` (outside the worktree) get no status, newly staged
    // files show as Added, and files inside an ignored dir stay ignored.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Disable file-scan exclusions so `.git` etc. appear as entries.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    // The worktree is rooted at `/root/tree`, so `/root/.gitignore` is an
    // *ancestor* gitignore from the worktree's perspective.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the contents of the ignored dir to be scanned so we can assert on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked and unmodified: no status expected.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new files in ancestor-ignored and
    // ignored locations, then verify each one's resulting state.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file shows up as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10613
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies repository discovery and refresh for linked git worktrees
    // (`.git` file pointing at `.git/worktrees/...`) and submodules
    // (`.git` file pointing at `.git/modules/...`).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repos should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should be associated with the linked worktree's repo, not the
    // outer `/project` repo.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" while the file on disk says "B" => modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10769
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two project worktrees that live under the same repository root must be
    // deduplicated into a single repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10816
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // When a buffer is saved under a new path, its unstaged/uncommitted diffs must
    // be rebased onto the *new* path's index and HEAD contents (driven by the
    // `BufferChangedFilePath` event).
    init_test(cx);

    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Distinct HEAD and index contents per file so we can tell which base the
    // diff is using.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the save below actually writes content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff (opened after the rename) should also use file_2's
    // committed contents as its base.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10930
10931async fn search(
10932 project: &Entity<Project>,
10933 query: SearchQuery,
10934 cx: &mut gpui::TestAppContext,
10935) -> Result<HashMap<String, Vec<Range<usize>>>> {
10936 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10937 let mut results = HashMap::default();
10938 while let Ok(search_result) = search_rx.rx.recv().await {
10939 match search_result {
10940 SearchResult::Buffer { buffer, ranges } => {
10941 results.entry(buffer).or_insert(ranges);
10942 }
10943 SearchResult::LimitReached => {}
10944 }
10945 }
10946 Ok(results
10947 .into_iter()
10948 .map(|(buffer, ranges)| {
10949 buffer.update(cx, |buffer, cx| {
10950 let path = buffer
10951 .file()
10952 .unwrap()
10953 .full_path(cx)
10954 .to_string_lossy()
10955 .to_string();
10956 let ranges = ranges
10957 .into_iter()
10958 .map(|range| range.to_offset(buffer))
10959 .collect::<Vec<_>>();
10960 (path, ranges)
10961 })
10962 })
10963 .collect())
10964}
10965
10966pub fn init_test(cx: &mut gpui::TestAppContext) {
10967 zlog::init_test();
10968
10969 cx.update(|cx| {
10970 let settings_store = SettingsStore::test(cx);
10971 cx.set_global(settings_store);
10972 release_channel::init(semver::Version::new(0, 0, 0), cx);
10973 });
10974}
10975
10976fn json_lang() -> Arc<Language> {
10977 Arc::new(Language::new(
10978 LanguageConfig {
10979 name: "JSON".into(),
10980 matcher: LanguageMatcher {
10981 path_suffixes: vec!["json".to_string()],
10982 ..Default::default()
10983 },
10984 ..Default::default()
10985 },
10986 None,
10987 ))
10988}
10989
10990fn js_lang() -> Arc<Language> {
10991 Arc::new(Language::new(
10992 LanguageConfig {
10993 name: "JavaScript".into(),
10994 matcher: LanguageMatcher {
10995 path_suffixes: vec!["js".to_string()],
10996 ..Default::default()
10997 },
10998 ..Default::default()
10999 },
11000 None,
11001 ))
11002}
11003
/// A fake "Python" language (no grammar) wired up with a toolchain lister that
/// discovers `.venv` directories on the fake filesystem, for exercising
/// toolchain-selection logic in tests.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Check `<worktree_root>/<ancestor>/.venv` for each ancestor of
                // the subroot path; existing ones become toolchain entries.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolving a concrete toolchain is not needed by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are emitted for this fake toolchain.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11073
11074fn typescript_lang() -> Arc<Language> {
11075 Arc::new(Language::new(
11076 LanguageConfig {
11077 name: "TypeScript".into(),
11078 matcher: LanguageMatcher {
11079 path_suffixes: vec!["ts".to_string()],
11080 ..Default::default()
11081 },
11082 ..Default::default()
11083 },
11084 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11085 ))
11086}
11087
11088fn tsx_lang() -> Arc<Language> {
11089 Arc::new(Language::new(
11090 LanguageConfig {
11091 name: "tsx".into(),
11092 matcher: LanguageMatcher {
11093 path_suffixes: vec!["tsx".to_string()],
11094 ..Default::default()
11095 },
11096 ..Default::default()
11097 },
11098 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11099 ))
11100}
11101
11102fn get_all_tasks(
11103 project: &Entity<Project>,
11104 task_contexts: Arc<TaskContexts>,
11105 cx: &mut App,
11106) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11107 let new_tasks = project.update(cx, |project, cx| {
11108 project.task_store.update(cx, |task_store, cx| {
11109 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11110 this.used_and_current_resolved_tasks(task_contexts, cx)
11111 })
11112 })
11113 });
11114
11115 cx.background_spawn(async move {
11116 let (mut old, new) = new_tasks.await;
11117 old.extend(new);
11118 old
11119 })
11120}
11121
11122#[track_caller]
11123fn assert_entry_git_state(
11124 tree: &Worktree,
11125 repository: &Repository,
11126 path: &str,
11127 index_status: Option<StatusCode>,
11128 is_ignored: bool,
11129) {
11130 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11131 let entry = tree
11132 .entry_for_path(&rel_path(path))
11133 .unwrap_or_else(|| panic!("entry {path} not found"));
11134 let status = repository
11135 .status_for_path(&repo_path(path))
11136 .map(|entry| entry.status);
11137 let expected = index_status.map(|index_status| {
11138 TrackedStatus {
11139 index_status,
11140 worktree_status: StatusCode::Unmodified,
11141 }
11142 .into()
11143 });
11144 assert_eq!(
11145 status, expected,
11146 "expected {path} to have git status: {expected:?}"
11147 );
11148 assert_eq!(
11149 entry.is_ignored, is_ignored,
11150 "expected {path} to have is_ignored: {is_ignored}"
11151 );
11152}
11153
11154#[track_caller]
11155fn git_init(path: &Path) -> git2::Repository {
11156 let mut init_opts = RepositoryInitOptions::new();
11157 init_opts.initial_head("main");
11158 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11159}
11160
11161#[track_caller]
11162fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11163 let path = path.as_ref();
11164 let mut index = repo.index().expect("Failed to get index");
11165 index.add_path(path).expect("Failed to add file");
11166 index.write().expect("Failed to write index");
11167}
11168
11169#[track_caller]
11170fn git_remove_index(path: &Path, repo: &git2::Repository) {
11171 let mut index = repo.index().expect("Failed to get index");
11172 index.remove_path(path).expect("Failed to add file");
11173 index.write().expect("Failed to write index");
11174}
11175
11176#[track_caller]
11177fn git_commit(msg: &'static str, repo: &git2::Repository) {
11178 use git2::Signature;
11179
11180 let signature = Signature::now("test", "test@zed.dev").unwrap();
11181 let oid = repo.index().unwrap().write_tree().unwrap();
11182 let tree = repo.find_tree(oid).unwrap();
11183 if let Ok(head) = repo.head() {
11184 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11185
11186 let parent_commit = parent_obj.as_commit().unwrap();
11187
11188 repo.commit(
11189 Some("HEAD"),
11190 &signature,
11191 &signature,
11192 msg,
11193 &tree,
11194 &[parent_commit],
11195 )
11196 .expect("Failed to commit with parent");
11197 } else {
11198 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11199 .expect("Failed to commit");
11200 }
11201}
11202
// Currently unused (`cfg(any())` disables compilation); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11208
11209#[track_caller]
11210fn git_stash(repo: &mut git2::Repository) {
11211 use git2::Signature;
11212
11213 let signature = Signature::now("test", "test@zed.dev").unwrap();
11214 repo.stash_save(&signature, "N/A", None)
11215 .expect("Failed to stash");
11216}
11217
11218#[track_caller]
11219fn git_reset(offset: usize, repo: &git2::Repository) {
11220 let head = repo.head().expect("Couldn't get repo head");
11221 let object = head.peel(git2::ObjectType::Commit).unwrap();
11222 let commit = object.as_commit().unwrap();
11223 let new_head = commit
11224 .parents()
11225 .inspect(|parnet| {
11226 parnet.message();
11227 })
11228 .nth(offset)
11229 .expect("Not enough history");
11230 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11231 .expect("Could not reset");
11232}
11233
// Currently unused (`cfg(any())` disables compilation); kept for future tests.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11244
// Currently unused (`cfg(any())` disables compilation); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11251
// Currently unused (`cfg(any())` disables compilation); kept for future tests.
/// Collects the repository's statuses into a path -> status map.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
11261
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees so we can check paths resolve to the right one.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file in the first worktree.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even when no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11345
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies repository bookkeeping as worktrees are removed: repos shared by
    // several worktrees survive removal of one of them, and the active
    // repository falls back to a remaining repo (or None when all are gone).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: `/root/b/script` and `/root/b` are separate worktrees that share
    // the repository rooted at `/root/b`.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees but only two distinct repositories.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing `/root/b/script` must keep the `/root/b` repo alive, since the
    // `/root/b` worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing `/root/a` should switch the active repository to `/root/b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repo-bearing worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11458
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both contain "two"; the working copy changes it to
    // "TWO", so the uncommitted diff starts with exactly one modified hunk.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // Keep ticking while the hunk is untouched; stop as soon as it is
        // optimistically marked as pending removal. Seeing NoSecondaryHunk
        // here would mean the status skipped the pending state entirely,
        // which is exactly what this test is guarding against.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The optimistic (pending) state must be observable before the staging
    // job has actually completed.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11603
11604#[gpui::test]
11605async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11606 init_test(cx);
11607
11608 // Configure read_only_files setting
11609 cx.update(|cx| {
11610 cx.update_global::<SettingsStore, _>(|store, cx| {
11611 store.update_user_settings(cx, |settings| {
11612 settings.project.worktree.read_only_files = Some(vec![
11613 "**/generated/**".to_string(),
11614 "**/*.gen.rs".to_string(),
11615 ]);
11616 });
11617 });
11618 });
11619
11620 let fs = FakeFs::new(cx.background_executor.clone());
11621 fs.insert_tree(
11622 path!("/root"),
11623 json!({
11624 "src": {
11625 "main.rs": "fn main() {}",
11626 "types.gen.rs": "// Generated file",
11627 },
11628 "generated": {
11629 "schema.rs": "// Auto-generated schema",
11630 }
11631 }),
11632 )
11633 .await;
11634
11635 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11636
11637 // Open a regular file - should be read-write
11638 let regular_buffer = project
11639 .update(cx, |project, cx| {
11640 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11641 })
11642 .await
11643 .unwrap();
11644
11645 regular_buffer.read_with(cx, |buffer, _| {
11646 assert!(!buffer.read_only(), "Regular file should not be read-only");
11647 });
11648
11649 // Open a file matching *.gen.rs pattern - should be read-only
11650 let gen_buffer = project
11651 .update(cx, |project, cx| {
11652 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11653 })
11654 .await
11655 .unwrap();
11656
11657 gen_buffer.read_with(cx, |buffer, _| {
11658 assert!(
11659 buffer.read_only(),
11660 "File matching *.gen.rs pattern should be read-only"
11661 );
11662 });
11663
11664 // Open a file in generated directory - should be read-only
11665 let generated_buffer = project
11666 .update(cx, |project, cx| {
11667 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11668 })
11669 .await
11670 .unwrap();
11671
11672 generated_buffer.read_with(cx, |buffer, _| {
11673 assert!(
11674 buffer.read_only(),
11675 "File in generated directory should be read-only"
11676 );
11677 });
11678}
11679
11680#[gpui::test]
11681async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11682 init_test(cx);
11683
11684 // Explicitly set read_only_files to empty (default behavior)
11685 cx.update(|cx| {
11686 cx.update_global::<SettingsStore, _>(|store, cx| {
11687 store.update_user_settings(cx, |settings| {
11688 settings.project.worktree.read_only_files = Some(vec![]);
11689 });
11690 });
11691 });
11692
11693 let fs = FakeFs::new(cx.background_executor.clone());
11694 fs.insert_tree(
11695 path!("/root"),
11696 json!({
11697 "src": {
11698 "main.rs": "fn main() {}",
11699 },
11700 "generated": {
11701 "schema.rs": "// Auto-generated schema",
11702 }
11703 }),
11704 )
11705 .await;
11706
11707 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11708
11709 // All files should be read-write when read_only_files is empty
11710 let main_buffer = project
11711 .update(cx, |project, cx| {
11712 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11713 })
11714 .await
11715 .unwrap();
11716
11717 main_buffer.read_with(cx, |buffer, _| {
11718 assert!(
11719 !buffer.read_only(),
11720 "Files should not be read-only when read_only_files is empty"
11721 );
11722 });
11723
11724 let generated_buffer = project
11725 .update(cx, |project, cx| {
11726 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11727 })
11728 .await
11729 .unwrap();
11730
11731 generated_buffer.read_with(cx, |buffer, _| {
11732 assert!(
11733 !buffer.read_only(),
11734 "Generated files should not be read-only when read_only_files is empty"
11735 );
11736 });
11737}
11738
11739#[gpui::test]
11740async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11741 init_test(cx);
11742
11743 // Configure to make lock files read-only
11744 cx.update(|cx| {
11745 cx.update_global::<SettingsStore, _>(|store, cx| {
11746 store.update_user_settings(cx, |settings| {
11747 settings.project.worktree.read_only_files = Some(vec![
11748 "**/*.lock".to_string(),
11749 "**/package-lock.json".to_string(),
11750 ]);
11751 });
11752 });
11753 });
11754
11755 let fs = FakeFs::new(cx.background_executor.clone());
11756 fs.insert_tree(
11757 path!("/root"),
11758 json!({
11759 "Cargo.lock": "# Lock file",
11760 "Cargo.toml": "[package]",
11761 "package-lock.json": "{}",
11762 "package.json": "{}",
11763 }),
11764 )
11765 .await;
11766
11767 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11768
11769 // Cargo.lock should be read-only
11770 let cargo_lock = project
11771 .update(cx, |project, cx| {
11772 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11773 })
11774 .await
11775 .unwrap();
11776
11777 cargo_lock.read_with(cx, |buffer, _| {
11778 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11779 });
11780
11781 // Cargo.toml should be read-write
11782 let cargo_toml = project
11783 .update(cx, |project, cx| {
11784 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11785 })
11786 .await
11787 .unwrap();
11788
11789 cargo_toml.read_with(cx, |buffer, _| {
11790 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11791 });
11792
11793 // package-lock.json should be read-only
11794 let package_lock = project
11795 .update(cx, |project, cx| {
11796 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11797 })
11798 .await
11799 .unwrap();
11800
11801 package_lock.read_with(cx, |buffer, _| {
11802 assert!(buffer.read_only(), "package-lock.json should be read-only");
11803 });
11804
11805 // package.json should be read-write
11806 let package_json = project
11807 .update(cx, |project, cx| {
11808 project.open_local_buffer(path!("/root/package.json"), cx)
11809 })
11810 .await
11811 .unwrap();
11812
11813 package_json.read_with(cx, |buffer, _| {
11814 assert!(!buffer.read_only(), "package.json should not be read-only");
11815 });
11816}