1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
13};
14use fs::FakeFs;
15use futures::{StreamExt, future};
16use git::{
17 GitHostingProviderRegistry,
18 repository::{RepoPath, repo_path},
19 status::{StatusCode, TrackedStatus},
20};
21use git2::RepositoryInitOptions;
22use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
23use itertools::Itertools;
24use language::{
25 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
26 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
27 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
28 ToolchainLister,
29 language_settings::{LanguageSettingsContent, language_settings},
30 markdown_lang, rust_lang, tree_sitter_typescript,
31};
32use lsp::{
33 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
34 Uri, WillRenameFiles, notification::DidRenameFiles,
35};
36use parking_lot::Mutex;
37use paths::{config_dir, global_gitignore_path, tasks_file};
38use postage::stream::Stream as _;
39use pretty_assertions::{assert_eq, assert_matches};
40use rand::{Rng as _, rngs::StdRng};
41use serde_json::json;
42#[cfg(not(windows))]
43use std::os;
44use std::{
45 env, mem,
46 num::NonZeroU32,
47 ops::Range,
48 str::FromStr,
49 sync::{Arc, OnceLock},
50 task::Poll,
51};
52use sum_tree::SumTree;
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // A symlink to the worktree root itself, plus a directory symlink
    // (`finnochio` -> `fennel`) inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the root symlink rather than the real path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, and grape, plus grape again via the
        // `finnochio` symlinked directory — five file entries total.
        assert_eq!(tree.file_count(), 5);
        // Both paths must resolve to the same underlying file (same inode).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
147
148#[gpui::test]
149async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
150 init_test(cx);
151
152 let dir = TempTree::new(json!({
153 ".editorconfig": r#"
154 root = true
155 [*.rs]
156 indent_style = tab
157 indent_size = 3
158 end_of_line = lf
159 insert_final_newline = true
160 trim_trailing_whitespace = true
161 max_line_length = 120
162 [*.js]
163 tab_width = 10
164 max_line_length = off
165 "#,
166 ".zed": {
167 "settings.json": r#"{
168 "tab_size": 8,
169 "hard_tabs": false,
170 "ensure_final_newline_on_save": false,
171 "remove_trailing_whitespace_on_save": false,
172 "preferred_line_length": 64,
173 "soft_wrap": "editor_width",
174 }"#,
175 },
176 "a.rs": "fn a() {\n A\n}",
177 "b": {
178 ".editorconfig": r#"
179 [*.rs]
180 indent_size = 2
181 max_line_length = off,
182 "#,
183 "b.rs": "fn b() {\n B\n}",
184 },
185 "c.js": "def c\n C\nend",
186 "README.json": "tabs are better\n",
187 }));
188
189 let path = dir.path();
190 let fs = FakeFs::new(cx.executor());
191 fs.insert_tree_from_real_fs(path, path).await;
192 let project = Project::test(fs, [path], cx).await;
193
194 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
195 language_registry.add(js_lang());
196 language_registry.add(json_lang());
197 language_registry.add(rust_lang());
198
199 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
200
201 cx.executor().run_until_parked();
202
203 cx.update(|cx| {
204 let tree = worktree.read(cx);
205 let settings_for = |path: &str| {
206 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
207 let file = File::for_entry(file_entry, worktree.clone());
208 let file_language = project
209 .read(cx)
210 .languages()
211 .load_language_for_file_path(file.path.as_std_path());
212 let file_language = cx
213 .foreground_executor()
214 .block_on(file_language)
215 .expect("Failed to get file language");
216 let file = file as _;
217 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
218 };
219
220 let settings_a = settings_for("a.rs");
221 let settings_b = settings_for("b/b.rs");
222 let settings_c = settings_for("c.js");
223 let settings_readme = settings_for("README.json");
224
225 // .editorconfig overrides .zed/settings
226 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
227 assert_eq!(settings_a.hard_tabs, true);
228 assert_eq!(settings_a.ensure_final_newline_on_save, true);
229 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
230 assert_eq!(settings_a.preferred_line_length, 120);
231
232 // .editorconfig in b/ overrides .editorconfig in root
233 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
234
235 // "indent_size" is not set, so "tab_width" is used
236 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
237
238 // When max_line_length is "off", default to .zed/settings.json
239 assert_eq!(settings_b.preferred_line_length, 64);
240 assert_eq!(settings_c.preferred_line_length, 64);
241
242 // README.md should not be affected by .editorconfig's globe "*.rs"
243 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
244 });
245}
246
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // .editorconfig files *above* the worktree root (in parent/grandparent)
    // should participate in settings resolution alongside the internal one.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let editorconfig discovery and settings propagation finish.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the fully merged language settings for a worktree file.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
310
311#[gpui::test]
312async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
313 init_test(cx);
314
315 let fs = FakeFs::new(cx.executor());
316 fs.insert_tree(
317 path!("/parent"),
318 json!({
319 ".editorconfig": "[*]\nindent_size = 99\n",
320 "worktree": {
321 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
322 "file.rs": "fn main() {}",
323 }
324 }),
325 )
326 .await;
327
328 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
329
330 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
331 language_registry.add(rust_lang());
332
333 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
334
335 cx.executor().run_until_parked();
336
337 cx.update(|cx| {
338 let tree = worktree.read(cx);
339 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
340 let file = File::for_entry(file_entry, worktree.clone());
341 let file_language = project
342 .read(cx)
343 .languages()
344 .load_language_for_file_path(file.path.as_std_path());
345 let file_language = cx
346 .foreground_executor()
347 .block_on(file_language)
348 .expect("Failed to get file language");
349 let file = file as _;
350 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
351
352 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
353 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
354 });
355}
356
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The nearest *external* .editorconfig (in parent/) declares `root = true`,
    // which must stop the upward search before the grandparent's config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        // NOTE(review): 4 is also the default tab_size, so this assertion would
        // pass even if the parent's config were ignored entirely (cf.
        // test_external_editorconfig_not_loaded_without_internal_config, which
        // expects the default 4). Consider a non-default value here to make the
        // test discriminating — TODO confirm intended behavior.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
404
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees share a single external .editorconfig in their
    // common parent; both must pick up its settings.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
466
467#[gpui::test]
468async fn test_external_editorconfig_not_loaded_without_internal_config(
469 cx: &mut gpui::TestAppContext,
470) {
471 init_test(cx);
472
473 let fs = FakeFs::new(cx.executor());
474 fs.insert_tree(
475 path!("/parent"),
476 json!({
477 ".editorconfig": "[*]\nindent_size = 99\n",
478 "worktree": {
479 "file.rs": "fn main() {}",
480 }
481 }),
482 )
483 .await;
484
485 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
486
487 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
488 language_registry.add(rust_lang());
489
490 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
491
492 cx.executor().run_until_parked();
493
494 cx.update(|cx| {
495 let tree = worktree.read(cx);
496 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
497 let file = File::for_entry(file_entry, worktree.clone());
498 let file_language = project
499 .read(cx)
500 .languages()
501 .load_language_for_file_path(file.path.as_std_path());
502 let file_language = cx
503 .foreground_executor()
504 .block_on(file_language)
505 .expect("Failed to get file language");
506 let file = file as _;
507 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
508
509 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
510 // because without an internal .editorconfig, external configs are not loaded
511 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
512 });
513}
514
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Editing an external .editorconfig (outside the worktree root) should be
    // noticed by a watcher and cause settings to be re-resolved.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
588
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree added to an existing project should also run external
    // .editorconfig discovery, not just those present at project creation.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree after the project is already running.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
665
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Removing the only worktree that references an external .editorconfig
    // should drop the cached config and its file watcher.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (worktree ids, external config paths, watcher paths).
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
721
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two worktrees share one external .editorconfig; removing only one of
    // them must keep the shared config (and its watcher) alive for the other.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
819
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Install the global git-hosting-provider registry before opening the project.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-style provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from project settings must be registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings should unregister the provider again.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
884
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root .zed/ and nested b/.zed/ each declare settings and tasks, so both
    // settings layering and task-source ordering can be verified.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution needs an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the tasks defined in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // Nested b/.zed/settings.json overrides the worktree-root settings.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // NOTE(review): the expect message says "global task", but this lookup
    // targets the worktree-local ".zed" task source — consider rewording.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            // Mark the top-level task as recently scheduled, then add a global
            // tasks.json entry on top of the worktree-local sources.
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the top-level task is surfaced first and the new
    // global task appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1085
1086#[gpui::test]
1087async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1088 init_test(cx);
1089 TaskStore::init(None);
1090
1091 let fs = FakeFs::new(cx.executor());
1092 fs.insert_tree(
1093 path!("/dir"),
1094 json!({
1095 ".zed": {
1096 "tasks.json": r#"[{
1097 "label": "test worktree root",
1098 "command": "echo $ZED_WORKTREE_ROOT"
1099 }]"#,
1100 },
1101 "a": {
1102 "a.rs": "fn a() {\n A\n}"
1103 },
1104 }),
1105 )
1106 .await;
1107
1108 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1109 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1110
1111 cx.executor().run_until_parked();
1112 let worktree_id = cx.update(|cx| {
1113 project.update(cx, |project, cx| {
1114 project.worktrees(cx).next().unwrap().read(cx).id()
1115 })
1116 });
1117
1118 let active_non_worktree_item_tasks = cx
1119 .update(|cx| {
1120 get_all_tasks(
1121 &project,
1122 Arc::new(TaskContexts {
1123 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1124 active_worktree_context: None,
1125 other_worktree_contexts: Vec::new(),
1126 lsp_task_sources: HashMap::default(),
1127 latest_selection: None,
1128 }),
1129 cx,
1130 )
1131 })
1132 .await;
1133 assert!(
1134 active_non_worktree_item_tasks.is_empty(),
1135 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1136 );
1137
1138 let active_worktree_tasks = cx
1139 .update(|cx| {
1140 get_all_tasks(
1141 &project,
1142 Arc::new(TaskContexts {
1143 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1144 active_worktree_context: Some((worktree_id, {
1145 let mut worktree_context = TaskContext::default();
1146 worktree_context
1147 .task_variables
1148 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1149 worktree_context
1150 })),
1151 other_worktree_contexts: Vec::new(),
1152 lsp_task_sources: HashMap::default(),
1153 latest_selection: None,
1154 }),
1155 cx,
1156 )
1157 })
1158 .await;
1159 assert_eq!(
1160 active_worktree_tasks
1161 .into_iter()
1162 .map(|(source_kind, task)| {
1163 let resolved = task.resolved;
1164 (source_kind, resolved.command.unwrap())
1165 })
1166 .collect::<Vec<_>>(),
1167 vec![(
1168 TaskSourceKind::Worktree {
1169 id: worktree_id,
1170 directory_in_worktree: rel_path(".zed").into(),
1171 id_base: "local worktree tasks from directory \".zed\"".into(),
1172 },
1173 "echo /dir".to_string(),
1174 )]
1175 );
1176}
1177
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Scenario: one worktree contains two Python subprojects, each rooted by
    // its own `pyproject.toml`. Both initially share a single "ty" language
    // server instance; activating a different toolchain for one subproject
    // should spawn a second, separate server instance for it.

    // Roots a subproject at the nearest ancestor directory that contains a
    // `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walks up to `depth` ancestors of `path` and returns the first one
        // containing a `pyproject.toml` file, if any.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
            {
                "languages": {
                    "Python": {
                        "language_servers": ["ty"]
                    }
                }
            }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no toolchain distinguishes the two subprojects yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b thanks to its pyproject.toml.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b; this should cause a
    // dedicated language server instance to be spawned for that subproject.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1379
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language server lifecycle management: server
    // startup on buffer open, capability-based buffer configuration,
    // didChange/didSave/didClose routing by language, re-opening documents on
    // file renames (including renames that change the file's language),
    // server restarts, and didClose on buffer drop.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion trigger characters so we can
    // tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is observed as a close of the old URI followed by
    // an open of the new one, on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared on a language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers should receive a shutdown request before the restart completes.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1782
1783#[gpui::test]
1784async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1785 init_test(cx);
1786
1787 let settings_json_contents = json!({
1788 "languages": {
1789 "Rust": {
1790 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1791 }
1792 },
1793 "lsp": {
1794 "my_fake_lsp": {
1795 "binary": {
1796 // file exists, so this is treated as a relative path
1797 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1798 }
1799 },
1800 "lsp_on_path": {
1801 "binary": {
1802 // file doesn't exist, so it will fall back on PATH env var
1803 "path": path!("lsp_on_path.exe").to_string(),
1804 }
1805 }
1806 },
1807 });
1808
1809 let fs = FakeFs::new(cx.executor());
1810 fs.insert_tree(
1811 path!("/the-root"),
1812 json!({
1813 ".zed": {
1814 "settings.json": settings_json_contents.to_string(),
1815 },
1816 ".relative_path": {
1817 "to": {
1818 "my_fake_lsp.exe": "",
1819 },
1820 },
1821 "src": {
1822 "main.rs": "",
1823 }
1824 }),
1825 )
1826 .await;
1827
1828 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1829 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1830 language_registry.add(rust_lang());
1831
1832 let mut my_fake_lsp = language_registry.register_fake_lsp(
1833 "Rust",
1834 FakeLspAdapter {
1835 name: "my_fake_lsp",
1836 ..Default::default()
1837 },
1838 );
1839 let mut lsp_on_path = language_registry.register_fake_lsp(
1840 "Rust",
1841 FakeLspAdapter {
1842 name: "lsp_on_path",
1843 ..Default::default()
1844 },
1845 );
1846
1847 cx.run_until_parked();
1848
1849 // Start the language server by opening a buffer with a compatible file extension.
1850 project
1851 .update(cx, |project, cx| {
1852 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1853 })
1854 .await
1855 .unwrap();
1856
1857 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1858 assert_eq!(
1859 lsp_path.to_string_lossy(),
1860 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1861 );
1862
1863 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1864 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1865}
1866
1867#[gpui::test]
1868async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1869 init_test(cx);
1870
1871 let settings_json_contents = json!({
1872 "languages": {
1873 "Rust": {
1874 "language_servers": ["tilde_lsp"]
1875 }
1876 },
1877 "lsp": {
1878 "tilde_lsp": {
1879 "binary": {
1880 "path": "~/.local/bin/rust-analyzer",
1881 }
1882 }
1883 },
1884 });
1885
1886 let fs = FakeFs::new(cx.executor());
1887 fs.insert_tree(
1888 path!("/root"),
1889 json!({
1890 ".zed": {
1891 "settings.json": settings_json_contents.to_string(),
1892 },
1893 "src": {
1894 "main.rs": "fn main() {}",
1895 }
1896 }),
1897 )
1898 .await;
1899
1900 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1901 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1902 language_registry.add(rust_lang());
1903
1904 let mut tilde_lsp = language_registry.register_fake_lsp(
1905 "Rust",
1906 FakeLspAdapter {
1907 name: "tilde_lsp",
1908 ..Default::default()
1909 },
1910 );
1911 cx.run_until_parked();
1912
1913 project
1914 .update(cx, |project, cx| {
1915 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1916 })
1917 .await
1918 .unwrap();
1919
1920 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1921 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1922 assert_eq!(
1923 lsp_path, expected_path,
1924 "Tilde path should expand to home directory"
1925 );
1926}
1927
1928#[gpui::test]
1929async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1930 init_test(cx);
1931
1932 let fs = FakeFs::new(cx.executor());
1933 fs.insert_tree(
1934 path!("/the-root"),
1935 json!({
1936 ".gitignore": "target\n",
1937 "Cargo.lock": "",
1938 "src": {
1939 "a.rs": "",
1940 "b.rs": "",
1941 },
1942 "target": {
1943 "x": {
1944 "out": {
1945 "x.rs": ""
1946 }
1947 },
1948 "y": {
1949 "out": {
1950 "y.rs": "",
1951 }
1952 },
1953 "z": {
1954 "out": {
1955 "z.rs": ""
1956 }
1957 }
1958 }
1959 }),
1960 )
1961 .await;
1962 fs.insert_tree(
1963 path!("/the-registry"),
1964 json!({
1965 "dep1": {
1966 "src": {
1967 "dep1.rs": "",
1968 }
1969 },
1970 "dep2": {
1971 "src": {
1972 "dep2.rs": "",
1973 }
1974 },
1975 }),
1976 )
1977 .await;
1978 fs.insert_tree(
1979 path!("/the/stdlib"),
1980 json!({
1981 "LICENSE": "",
1982 "src": {
1983 "string.rs": "",
1984 }
1985 }),
1986 )
1987 .await;
1988
1989 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1990 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1991 (project.languages().clone(), project.lsp_store())
1992 });
1993 language_registry.add(rust_lang());
1994 let mut fake_servers = language_registry.register_fake_lsp(
1995 "Rust",
1996 FakeLspAdapter {
1997 name: "the-language-server",
1998 ..Default::default()
1999 },
2000 );
2001
2002 cx.executor().run_until_parked();
2003
2004 // Start the language server by opening a buffer with a compatible file extension.
2005 project
2006 .update(cx, |project, cx| {
2007 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2008 })
2009 .await
2010 .unwrap();
2011
2012 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2013 project.update(cx, |project, cx| {
2014 let worktree = project.worktrees(cx).next().unwrap();
2015 assert_eq!(
2016 worktree
2017 .read(cx)
2018 .snapshot()
2019 .entries(true, 0)
2020 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2021 .collect::<Vec<_>>(),
2022 &[
2023 ("", false),
2024 (".gitignore", false),
2025 ("Cargo.lock", false),
2026 ("src", false),
2027 ("src/a.rs", false),
2028 ("src/b.rs", false),
2029 ("target", true),
2030 ]
2031 );
2032 });
2033
2034 let prev_read_dir_count = fs.read_dir_call_count();
2035
2036 let fake_server = fake_servers.next().await.unwrap();
2037 cx.executor().run_until_parked();
2038 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2039 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2040 id
2041 });
2042
2043 // Simulate jumping to a definition in a dependency outside of the worktree.
2044 let _out_of_worktree_buffer = project
2045 .update(cx, |project, cx| {
2046 project.open_local_buffer_via_lsp(
2047 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2048 server_id,
2049 cx,
2050 )
2051 })
2052 .await
2053 .unwrap();
2054
2055 // Keep track of the FS events reported to the language server.
2056 let file_changes = Arc::new(Mutex::new(Vec::new()));
2057 fake_server
2058 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2059 registrations: vec![lsp::Registration {
2060 id: Default::default(),
2061 method: "workspace/didChangeWatchedFiles".to_string(),
2062 register_options: serde_json::to_value(
2063 lsp::DidChangeWatchedFilesRegistrationOptions {
2064 watchers: vec![
2065 lsp::FileSystemWatcher {
2066 glob_pattern: lsp::GlobPattern::String(
2067 path!("/the-root/Cargo.toml").to_string(),
2068 ),
2069 kind: None,
2070 },
2071 lsp::FileSystemWatcher {
2072 glob_pattern: lsp::GlobPattern::String(
2073 path!("/the-root/src/*.{rs,c}").to_string(),
2074 ),
2075 kind: None,
2076 },
2077 lsp::FileSystemWatcher {
2078 glob_pattern: lsp::GlobPattern::String(
2079 path!("/the-root/target/y/**/*.rs").to_string(),
2080 ),
2081 kind: None,
2082 },
2083 lsp::FileSystemWatcher {
2084 glob_pattern: lsp::GlobPattern::String(
2085 path!("/the/stdlib/src/**/*.rs").to_string(),
2086 ),
2087 kind: None,
2088 },
2089 lsp::FileSystemWatcher {
2090 glob_pattern: lsp::GlobPattern::String(
2091 path!("**/Cargo.lock").to_string(),
2092 ),
2093 kind: None,
2094 },
2095 ],
2096 },
2097 )
2098 .ok(),
2099 }],
2100 })
2101 .await
2102 .into_response()
2103 .unwrap();
2104 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2105 let file_changes = file_changes.clone();
2106 move |params, _| {
2107 let mut file_changes = file_changes.lock();
2108 file_changes.extend(params.changes);
2109 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2110 }
2111 });
2112
2113 cx.executor().run_until_parked();
2114 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2115 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2116
2117 let mut new_watched_paths = fs.watched_paths();
2118 new_watched_paths.retain(|path| {
2119 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2120 });
2121 assert_eq!(
2122 &new_watched_paths,
2123 &[
2124 Path::new(path!("/the-root")),
2125 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2126 Path::new(path!("/the/stdlib/src"))
2127 ]
2128 );
2129
2130 // Now the language server has asked us to watch an ignored directory path,
2131 // so we recursively load it.
2132 project.update(cx, |project, cx| {
2133 let worktree = project.visible_worktrees(cx).next().unwrap();
2134 assert_eq!(
2135 worktree
2136 .read(cx)
2137 .snapshot()
2138 .entries(true, 0)
2139 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2140 .collect::<Vec<_>>(),
2141 &[
2142 ("", false),
2143 (".gitignore", false),
2144 ("Cargo.lock", false),
2145 ("src", false),
2146 ("src/a.rs", false),
2147 ("src/b.rs", false),
2148 ("target", true),
2149 ("target/x", true),
2150 ("target/y", true),
2151 ("target/y/out", true),
2152 ("target/y/out/y.rs", true),
2153 ("target/z", true),
2154 ]
2155 );
2156 });
2157
2158 // Perform some file system mutations, two of which match the watched patterns,
2159 // and one of which does not.
2160 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2161 .await
2162 .unwrap();
2163 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2164 .await
2165 .unwrap();
2166 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2167 .await
2168 .unwrap();
2169 fs.create_file(
2170 path!("/the-root/target/x/out/x2.rs").as_ref(),
2171 Default::default(),
2172 )
2173 .await
2174 .unwrap();
2175 fs.create_file(
2176 path!("/the-root/target/y/out/y2.rs").as_ref(),
2177 Default::default(),
2178 )
2179 .await
2180 .unwrap();
2181 fs.save(
2182 path!("/the-root/Cargo.lock").as_ref(),
2183 &"".into(),
2184 Default::default(),
2185 )
2186 .await
2187 .unwrap();
2188 fs.save(
2189 path!("/the-stdlib/LICENSE").as_ref(),
2190 &"".into(),
2191 Default::default(),
2192 )
2193 .await
2194 .unwrap();
2195 fs.save(
2196 path!("/the/stdlib/src/string.rs").as_ref(),
2197 &"".into(),
2198 Default::default(),
2199 )
2200 .await
2201 .unwrap();
2202
2203 // The language server receives events for the FS mutations that match its watch patterns.
2204 cx.executor().run_until_parked();
2205 assert_eq!(
2206 &*file_changes.lock(),
2207 &[
2208 lsp::FileEvent {
2209 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2210 typ: lsp::FileChangeType::CHANGED,
2211 },
2212 lsp::FileEvent {
2213 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2214 typ: lsp::FileChangeType::DELETED,
2215 },
2216 lsp::FileEvent {
2217 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2218 typ: lsp::FileChangeType::CREATED,
2219 },
2220 lsp::FileEvent {
2221 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2222 typ: lsp::FileChangeType::CREATED,
2223 },
2224 lsp::FileEvent {
2225 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2226 typ: lsp::FileChangeType::CHANGED,
2227 },
2228 ]
2229 );
2230}
2231
// Verifies that LSP diagnostics are routed to the correct buffer when each
// open file lives in its own single-file worktree: pushing distinct
// diagnostics for `a.rs` and `b.rs` must highlight the right range, with the
// right severity, in each buffer independently.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Opening the project with two file paths (rather than a directory)
    // creates one single-file worktree per file.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file from the same (fake) language server:
    // an ERROR on the `a` identifier and a WARNING on the `b` identifier
    // (columns 4..5 cover the variable name in both sources).
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's chunked text should carry exactly its own diagnostic:
    // the error lands on "a" in buffer A, the warning on "b" in buffer B.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
2337
2338#[gpui::test]
2339async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2340 init_test(cx);
2341
2342 let fs = FakeFs::new(cx.executor());
2343 fs.insert_tree(
2344 path!("/root"),
2345 json!({
2346 "dir": {
2347 ".git": {
2348 "HEAD": "ref: refs/heads/main",
2349 },
2350 ".gitignore": "b.rs",
2351 "a.rs": "let a = 1;",
2352 "b.rs": "let b = 2;",
2353 },
2354 "other.rs": "let b = c;"
2355 }),
2356 )
2357 .await;
2358
2359 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2360 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2361 let (worktree, _) = project
2362 .update(cx, |project, cx| {
2363 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2364 })
2365 .await
2366 .unwrap();
2367 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2368
2369 let (worktree, _) = project
2370 .update(cx, |project, cx| {
2371 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2372 })
2373 .await
2374 .unwrap();
2375 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2376
2377 let server_id = LanguageServerId(0);
2378 lsp_store.update(cx, |lsp_store, cx| {
2379 lsp_store
2380 .update_diagnostics(
2381 server_id,
2382 lsp::PublishDiagnosticsParams {
2383 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2384 version: None,
2385 diagnostics: vec![lsp::Diagnostic {
2386 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2387 severity: Some(lsp::DiagnosticSeverity::ERROR),
2388 message: "unused variable 'b'".to_string(),
2389 ..Default::default()
2390 }],
2391 },
2392 None,
2393 DiagnosticSourceKind::Pushed,
2394 &[],
2395 cx,
2396 )
2397 .unwrap();
2398 lsp_store
2399 .update_diagnostics(
2400 server_id,
2401 lsp::PublishDiagnosticsParams {
2402 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2403 version: None,
2404 diagnostics: vec![lsp::Diagnostic {
2405 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2406 severity: Some(lsp::DiagnosticSeverity::ERROR),
2407 message: "unknown variable 'c'".to_string(),
2408 ..Default::default()
2409 }],
2410 },
2411 None,
2412 DiagnosticSourceKind::Pushed,
2413 &[],
2414 cx,
2415 )
2416 .unwrap();
2417 });
2418
2419 let main_ignored_buffer = project
2420 .update(cx, |project, cx| {
2421 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2422 })
2423 .await
2424 .unwrap();
2425 main_ignored_buffer.update(cx, |buffer, _| {
2426 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2427 assert_eq!(
2428 chunks
2429 .iter()
2430 .map(|(s, d)| (s.as_str(), *d))
2431 .collect::<Vec<_>>(),
2432 &[
2433 ("let ", None),
2434 ("b", Some(DiagnosticSeverity::ERROR)),
2435 (" = 2;", None),
2436 ],
2437 "Gigitnored buffers should still get in-buffer diagnostics",
2438 );
2439 });
2440 let other_buffer = project
2441 .update(cx, |project, cx| {
2442 project.open_buffer((other_worktree_id, rel_path("")), cx)
2443 })
2444 .await
2445 .unwrap();
2446 other_buffer.update(cx, |buffer, _| {
2447 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2448 assert_eq!(
2449 chunks
2450 .iter()
2451 .map(|(s, d)| (s.as_str(), *d))
2452 .collect::<Vec<_>>(),
2453 &[
2454 ("let b = ", None),
2455 ("c", Some(DiagnosticSeverity::ERROR)),
2456 (";", None),
2457 ],
2458 "Buffers from hidden projects should still get in-buffer diagnostics"
2459 );
2460 });
2461
2462 project.update(cx, |project, cx| {
2463 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2464 assert_eq!(
2465 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2466 vec![(
2467 ProjectPath {
2468 worktree_id: main_worktree_id,
2469 path: rel_path("b.rs").into(),
2470 },
2471 server_id,
2472 DiagnosticSummary {
2473 error_count: 1,
2474 warning_count: 0,
2475 }
2476 )]
2477 );
2478 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2479 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2480 });
2481}
2482
// Verifies the project-level event sequence produced by a language server's
// disk-based diagnostics cycle: Added -> DiskBasedDiagnosticsStarted ->
// DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, keyed off the adapter's
// configured progress token. Also checks that republishing identical empty
// diagnostics produces at most one DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter declares the progress token so that $/progress work tagged
    // with it is treated as a disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress begun under the disk-based token maps to the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the tokened progress maps to the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The diagnostic published while the buffer was unopened is present once
    // the buffer is opened.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event: the second empty publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2618
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in flight does not wedge the "diagnostics running" state: the
// old server's unfinished progress is discarded, the new server's progress is
// tracked under a new server id, and finishing it clears the running set.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The restart surfaces as Removed(old id 0) followed by Added(new id 1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2720
// Verifies that diagnostics published by a language server are cleared — both
// from the buffer and from the project-wide summary — when that server is
// restarted, rather than lingering as stale entries from the old instance.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic is visible in the buffer and counted project-wide.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2801
2802#[gpui::test]
2803async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2804 init_test(cx);
2805
2806 let fs = FakeFs::new(cx.executor());
2807 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2808
2809 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2810 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2811
2812 language_registry.add(rust_lang());
2813 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2814
2815 let (buffer, _handle) = project
2816 .update(cx, |project, cx| {
2817 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2818 })
2819 .await
2820 .unwrap();
2821
2822 // Before restarting the server, report diagnostics with an unknown buffer version.
2823 let fake_server = fake_servers.next().await.unwrap();
2824 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2825 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2826 version: Some(10000),
2827 diagnostics: Vec::new(),
2828 });
2829 cx.executor().run_until_parked();
2830 project.update(cx, |project, cx| {
2831 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2832 });
2833
2834 let mut fake_server = fake_servers.next().await.unwrap();
2835 let notification = fake_server
2836 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2837 .await
2838 .text_document;
2839 assert_eq!(notification.version, 0);
2840}
2841
// Verifies that cancelling language-server work for a buffer sends a
// window/workDoneProgress/cancel notification for the *cancellable* progress
// token only — the non-cancellable token started first must not be cancelled.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First work item: explicitly NOT cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second work item: cancellable — this is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token should be named in the cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2911
// Verifies that toggling `enable_language_server` per language in the user
// settings starts and stops exactly the matching server: disabling Rust exits
// only the Rust server; re-enabling Rust while disabling JavaScript starts a
// fresh Rust server (which re-opens the buffer) and exits the JS server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so each can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer triggers its language's server to start.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The new Rust server instance re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3029
3030#[gpui::test(iterations = 3)]
3031async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let text = "
3035 fn a() { A }
3036 fn b() { BB }
3037 fn c() { CCC }
3038 "
3039 .unindent();
3040
3041 let fs = FakeFs::new(cx.executor());
3042 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3043
3044 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3045 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3046
3047 language_registry.add(rust_lang());
3048 let mut fake_servers = language_registry.register_fake_lsp(
3049 "Rust",
3050 FakeLspAdapter {
3051 disk_based_diagnostics_sources: vec!["disk".into()],
3052 ..Default::default()
3053 },
3054 );
3055
3056 let buffer = project
3057 .update(cx, |project, cx| {
3058 project.open_local_buffer(path!("/dir/a.rs"), cx)
3059 })
3060 .await
3061 .unwrap();
3062
3063 let _handle = project.update(cx, |project, cx| {
3064 project.register_buffer_with_language_servers(&buffer, cx)
3065 });
3066
3067 let mut fake_server = fake_servers.next().await.unwrap();
3068 let open_notification = fake_server
3069 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3070 .await;
3071
3072 // Edit the buffer, moving the content down
3073 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3074 let change_notification_1 = fake_server
3075 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3076 .await;
3077 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3078
3079 // Report some diagnostics for the initial version of the buffer
3080 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3081 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3082 version: Some(open_notification.text_document.version),
3083 diagnostics: vec![
3084 lsp::Diagnostic {
3085 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3086 severity: Some(DiagnosticSeverity::ERROR),
3087 message: "undefined variable 'A'".to_string(),
3088 source: Some("disk".to_string()),
3089 ..Default::default()
3090 },
3091 lsp::Diagnostic {
3092 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3093 severity: Some(DiagnosticSeverity::ERROR),
3094 message: "undefined variable 'BB'".to_string(),
3095 source: Some("disk".to_string()),
3096 ..Default::default()
3097 },
3098 lsp::Diagnostic {
3099 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3100 severity: Some(DiagnosticSeverity::ERROR),
3101 source: Some("disk".to_string()),
3102 message: "undefined variable 'CCC'".to_string(),
3103 ..Default::default()
3104 },
3105 ],
3106 });
3107
3108 // The diagnostics have moved down since they were created.
3109 cx.executor().run_until_parked();
3110 buffer.update(cx, |buffer, _| {
3111 assert_eq!(
3112 buffer
3113 .snapshot()
3114 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3115 .collect::<Vec<_>>(),
3116 &[
3117 DiagnosticEntry {
3118 range: Point::new(3, 9)..Point::new(3, 11),
3119 diagnostic: Diagnostic {
3120 source: Some("disk".into()),
3121 severity: DiagnosticSeverity::ERROR,
3122 message: "undefined variable 'BB'".to_string(),
3123 is_disk_based: true,
3124 group_id: 1,
3125 is_primary: true,
3126 source_kind: DiagnosticSourceKind::Pushed,
3127 ..Diagnostic::default()
3128 },
3129 },
3130 DiagnosticEntry {
3131 range: Point::new(4, 9)..Point::new(4, 12),
3132 diagnostic: Diagnostic {
3133 source: Some("disk".into()),
3134 severity: DiagnosticSeverity::ERROR,
3135 message: "undefined variable 'CCC'".to_string(),
3136 is_disk_based: true,
3137 group_id: 2,
3138 is_primary: true,
3139 source_kind: DiagnosticSourceKind::Pushed,
3140 ..Diagnostic::default()
3141 }
3142 }
3143 ]
3144 );
3145 assert_eq!(
3146 chunks_with_diagnostics(buffer, 0..buffer.len()),
3147 [
3148 ("\n\nfn a() { ".to_string(), None),
3149 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3150 (" }\nfn b() { ".to_string(), None),
3151 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3152 (" }\nfn c() { ".to_string(), None),
3153 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3154 (" }\n".to_string(), None),
3155 ]
3156 );
3157 assert_eq!(
3158 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3159 [
3160 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3161 (" }\nfn c() { ".to_string(), None),
3162 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3163 ]
3164 );
3165 });
3166
3167 // Ensure overlapping diagnostics are highlighted correctly.
3168 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3169 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3170 version: Some(open_notification.text_document.version),
3171 diagnostics: vec![
3172 lsp::Diagnostic {
3173 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3174 severity: Some(DiagnosticSeverity::ERROR),
3175 message: "undefined variable 'A'".to_string(),
3176 source: Some("disk".to_string()),
3177 ..Default::default()
3178 },
3179 lsp::Diagnostic {
3180 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3181 severity: Some(DiagnosticSeverity::WARNING),
3182 message: "unreachable statement".to_string(),
3183 source: Some("disk".to_string()),
3184 ..Default::default()
3185 },
3186 ],
3187 });
3188
3189 cx.executor().run_until_parked();
3190 buffer.update(cx, |buffer, _| {
3191 assert_eq!(
3192 buffer
3193 .snapshot()
3194 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3195 .collect::<Vec<_>>(),
3196 &[
3197 DiagnosticEntry {
3198 range: Point::new(2, 9)..Point::new(2, 12),
3199 diagnostic: Diagnostic {
3200 source: Some("disk".into()),
3201 severity: DiagnosticSeverity::WARNING,
3202 message: "unreachable statement".to_string(),
3203 is_disk_based: true,
3204 group_id: 4,
3205 is_primary: true,
3206 source_kind: DiagnosticSourceKind::Pushed,
3207 ..Diagnostic::default()
3208 }
3209 },
3210 DiagnosticEntry {
3211 range: Point::new(2, 9)..Point::new(2, 10),
3212 diagnostic: Diagnostic {
3213 source: Some("disk".into()),
3214 severity: DiagnosticSeverity::ERROR,
3215 message: "undefined variable 'A'".to_string(),
3216 is_disk_based: true,
3217 group_id: 3,
3218 is_primary: true,
3219 source_kind: DiagnosticSourceKind::Pushed,
3220 ..Diagnostic::default()
3221 },
3222 }
3223 ]
3224 );
3225 assert_eq!(
3226 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3227 [
3228 ("fn a() { ".to_string(), None),
3229 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3230 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3231 ("\n".to_string(), None),
3232 ]
3233 );
3234 assert_eq!(
3235 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3236 [
3237 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3238 ("\n".to_string(), None),
3239 ]
3240 );
3241 });
3242
3243 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3244 // changes since the last save.
3245 buffer.update(cx, |buffer, cx| {
3246 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3247 buffer.edit(
3248 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3249 None,
3250 cx,
3251 );
3252 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3253 });
3254 let change_notification_2 = fake_server
3255 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3256 .await;
3257 assert!(
3258 change_notification_2.text_document.version > change_notification_1.text_document.version
3259 );
3260
3261 // Handle out-of-order diagnostics
3262 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3263 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3264 version: Some(change_notification_2.text_document.version),
3265 diagnostics: vec![
3266 lsp::Diagnostic {
3267 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3268 severity: Some(DiagnosticSeverity::ERROR),
3269 message: "undefined variable 'BB'".to_string(),
3270 source: Some("disk".to_string()),
3271 ..Default::default()
3272 },
3273 lsp::Diagnostic {
3274 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3275 severity: Some(DiagnosticSeverity::WARNING),
3276 message: "undefined variable 'A'".to_string(),
3277 source: Some("disk".to_string()),
3278 ..Default::default()
3279 },
3280 ],
3281 });
3282
3283 cx.executor().run_until_parked();
3284 buffer.update(cx, |buffer, _| {
3285 assert_eq!(
3286 buffer
3287 .snapshot()
3288 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3289 .collect::<Vec<_>>(),
3290 &[
3291 DiagnosticEntry {
3292 range: Point::new(2, 21)..Point::new(2, 22),
3293 diagnostic: Diagnostic {
3294 source: Some("disk".into()),
3295 severity: DiagnosticSeverity::WARNING,
3296 message: "undefined variable 'A'".to_string(),
3297 is_disk_based: true,
3298 group_id: 6,
3299 is_primary: true,
3300 source_kind: DiagnosticSourceKind::Pushed,
3301 ..Diagnostic::default()
3302 }
3303 },
3304 DiagnosticEntry {
3305 range: Point::new(3, 9)..Point::new(3, 14),
3306 diagnostic: Diagnostic {
3307 source: Some("disk".into()),
3308 severity: DiagnosticSeverity::ERROR,
3309 message: "undefined variable 'BB'".to_string(),
3310 is_disk_based: true,
3311 group_id: 5,
3312 is_primary: true,
3313 source_kind: DiagnosticSourceKind::Pushed,
3314 ..Diagnostic::default()
3315 },
3316 }
3317 ]
3318 );
3319 });
3320}
3321
3322#[gpui::test]
3323async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3324 init_test(cx);
3325
3326 let text = concat!(
3327 "let one = ;\n", //
3328 "let two = \n",
3329 "let three = 3;\n",
3330 );
3331
3332 let fs = FakeFs::new(cx.executor());
3333 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3334
3335 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3336 let buffer = project
3337 .update(cx, |project, cx| {
3338 project.open_local_buffer(path!("/dir/a.rs"), cx)
3339 })
3340 .await
3341 .unwrap();
3342
3343 project.update(cx, |project, cx| {
3344 project.lsp_store.update(cx, |lsp_store, cx| {
3345 lsp_store
3346 .update_diagnostic_entries(
3347 LanguageServerId(0),
3348 PathBuf::from(path!("/dir/a.rs")),
3349 None,
3350 None,
3351 vec![
3352 DiagnosticEntry {
3353 range: Unclipped(PointUtf16::new(0, 10))
3354 ..Unclipped(PointUtf16::new(0, 10)),
3355 diagnostic: Diagnostic {
3356 severity: DiagnosticSeverity::ERROR,
3357 message: "syntax error 1".to_string(),
3358 source_kind: DiagnosticSourceKind::Pushed,
3359 ..Diagnostic::default()
3360 },
3361 },
3362 DiagnosticEntry {
3363 range: Unclipped(PointUtf16::new(1, 10))
3364 ..Unclipped(PointUtf16::new(1, 10)),
3365 diagnostic: Diagnostic {
3366 severity: DiagnosticSeverity::ERROR,
3367 message: "syntax error 2".to_string(),
3368 source_kind: DiagnosticSourceKind::Pushed,
3369 ..Diagnostic::default()
3370 },
3371 },
3372 ],
3373 cx,
3374 )
3375 .unwrap();
3376 })
3377 });
3378
3379 // An empty range is extended forward to include the following character.
3380 // At the end of a line, an empty range is extended backward to include
3381 // the preceding character.
3382 buffer.update(cx, |buffer, _| {
3383 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3384 assert_eq!(
3385 chunks
3386 .iter()
3387 .map(|(s, d)| (s.as_str(), *d))
3388 .collect::<Vec<_>>(),
3389 &[
3390 ("let one = ", None),
3391 (";", Some(DiagnosticSeverity::ERROR)),
3392 ("\nlet two =", None),
3393 (" ", Some(DiagnosticSeverity::ERROR)),
3394 ("\nlet three = 3;\n", None)
3395 ]
3396 );
3397 });
3398}
3399
3400#[gpui::test]
3401async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3402 init_test(cx);
3403
3404 let fs = FakeFs::new(cx.executor());
3405 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3406 .await;
3407
3408 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3409 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
3410
3411 lsp_store.update(cx, |lsp_store, cx| {
3412 lsp_store
3413 .update_diagnostic_entries(
3414 LanguageServerId(0),
3415 Path::new(path!("/dir/a.rs")).to_owned(),
3416 None,
3417 None,
3418 vec![DiagnosticEntry {
3419 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3420 diagnostic: Diagnostic {
3421 severity: DiagnosticSeverity::ERROR,
3422 is_primary: true,
3423 message: "syntax error a1".to_string(),
3424 source_kind: DiagnosticSourceKind::Pushed,
3425 ..Diagnostic::default()
3426 },
3427 }],
3428 cx,
3429 )
3430 .unwrap();
3431 lsp_store
3432 .update_diagnostic_entries(
3433 LanguageServerId(1),
3434 Path::new(path!("/dir/a.rs")).to_owned(),
3435 None,
3436 None,
3437 vec![DiagnosticEntry {
3438 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3439 diagnostic: Diagnostic {
3440 severity: DiagnosticSeverity::ERROR,
3441 is_primary: true,
3442 message: "syntax error b1".to_string(),
3443 source_kind: DiagnosticSourceKind::Pushed,
3444 ..Diagnostic::default()
3445 },
3446 }],
3447 cx,
3448 )
3449 .unwrap();
3450
3451 assert_eq!(
3452 lsp_store.diagnostic_summary(false, cx),
3453 DiagnosticSummary {
3454 error_count: 2,
3455 warning_count: 0,
3456 }
3457 );
3458 });
3459}
3460
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` rebases edits that the language server
    // computed against an older document version: after the server snapshots
    // the buffer, the user keeps editing, and the server's edits (tagged with
    // the stale version) must be translated through those newer changes.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Record the document version the server saw on open; the edits below are
    // tagged with this version, which is stale by the time they are applied.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in coordinates of the *original*
    // buffer text (the version the server observed), not the current text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves the user's intervening comments
    // while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3615
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` minimizes a large, wasteful LSP diff:
    // the server effectively rewrites most of the file, but the computed
    // buffer edits shrink down to the two spots that actually change.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the sprawling diff above, only two minimal edits survive:
        // the import rewrite and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3726
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that `edits_from_lsp` tolerates a spec-violating edit list:
    // an insertion listed *after* a replacement that starts at the same
    // position. Both edits must still be applied, with the inserted text
    // ending up before the replaced text.
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace "Path" with itself (a no-op replacement).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Insert an import line at the very start of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3782
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` sanitizes malformed server edits:
    // unordered entries, an inverted range (end before start), and a range
    // whose end row lies far past the end of the buffer. The resulting
    // buffer edits come out sorted, minimal, and clipped to the buffer.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (col 4) precedes start (col 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End row 99 is far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The normalized edits match the minimal pair produced for the
        // equivalent well-formed diff (see the adjacent-lines test above).
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3889
3890fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3891 buffer: &Buffer,
3892 range: Range<T>,
3893) -> Vec<(String, Option<DiagnosticSeverity>)> {
3894 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3895 for chunk in buffer.snapshot().chunks(range, true) {
3896 if chunks
3897 .last()
3898 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3899 {
3900 chunks.last_mut().unwrap().0.push_str(chunk.text);
3901 } else {
3902 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3903 }
3904 }
3905 chunks
3906}
3907
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition into a file outside the project's visible
    // worktree: the target is opened via a new, invisible worktree, which is
    // released again once the definition (and its buffer handle) is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` is a sibling file on disk.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server reports that the symbol at offset 22 of `b.rs` is defined in
    // `a.rs` at columns 9..10 of the first line.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, the project holds an extra,
        // invisible worktree for `a.rs` next to the visible one for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4008
4009#[gpui::test]
4010async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4011 init_test(cx);
4012
4013 let fs = FakeFs::new(cx.executor());
4014 fs.insert_tree(
4015 path!("/dir"),
4016 json!({
4017 "a.ts": "",
4018 }),
4019 )
4020 .await;
4021
4022 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4023
4024 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4025 language_registry.add(typescript_lang());
4026 let mut fake_language_servers = language_registry.register_fake_lsp(
4027 "TypeScript",
4028 FakeLspAdapter {
4029 capabilities: lsp::ServerCapabilities {
4030 completion_provider: Some(lsp::CompletionOptions {
4031 trigger_characters: Some(vec![".".to_string()]),
4032 ..Default::default()
4033 }),
4034 ..Default::default()
4035 },
4036 ..Default::default()
4037 },
4038 );
4039
4040 let (buffer, _handle) = project
4041 .update(cx, |p, cx| {
4042 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4043 })
4044 .await
4045 .unwrap();
4046
4047 let fake_server = fake_language_servers.next().await.unwrap();
4048 cx.executor().run_until_parked();
4049
4050 // When text_edit exists, it takes precedence over insert_text and label
4051 let text = "let a = obj.fqn";
4052 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4053 let completions = project.update(cx, |project, cx| {
4054 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4055 });
4056
4057 fake_server
4058 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4059 Ok(Some(lsp::CompletionResponse::Array(vec![
4060 lsp::CompletionItem {
4061 label: "labelText".into(),
4062 insert_text: Some("insertText".into()),
4063 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4064 range: lsp::Range::new(
4065 lsp::Position::new(0, text.len() as u32 - 3),
4066 lsp::Position::new(0, text.len() as u32),
4067 ),
4068 new_text: "textEditText".into(),
4069 })),
4070 ..Default::default()
4071 },
4072 ])))
4073 })
4074 .next()
4075 .await;
4076
4077 let completions = completions
4078 .await
4079 .unwrap()
4080 .into_iter()
4081 .flat_map(|response| response.completions)
4082 .collect::<Vec<_>>();
4083 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4084
4085 assert_eq!(completions.len(), 1);
4086 assert_eq!(completions[0].new_text, "textEditText");
4087 assert_eq!(
4088 completions[0].replace_range.to_offset(&snapshot),
4089 text.len() - 3..text.len()
4090 );
4091}
4092
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies handling of `CompletionList.item_defaults.edit_range`: when a
    // completion item has no `text_edit` of its own, the list-level default
    // range is used, combined with `text_edit_text` (Test 1) or, when that is
    // also absent, the item's `label` (Test 2).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text; the default range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label is used as the new
        // text — `insert_text` ("irrelevant") is ignored.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4230
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion handling when the server provides neither a
    // `text_edit` nor a default edit range: the new text falls back to
    // `insert_text` (Test 1) or `label` (Test 2), and the replacement range
    // is derived from the word adjacent to the cursor in the buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label; the range covers the word "fqn"
    // preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // Only the label is available, so it becomes the new text; the range
    // covers "cmp" — the word before the cursor inside the string literal.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4337
4338#[gpui::test]
4339async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4340 init_test(cx);
4341
4342 let fs = FakeFs::new(cx.executor());
4343 fs.insert_tree(
4344 path!("/dir"),
4345 json!({
4346 "a.ts": "",
4347 }),
4348 )
4349 .await;
4350
4351 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4352
4353 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4354 language_registry.add(typescript_lang());
4355 let mut fake_language_servers = language_registry.register_fake_lsp(
4356 "TypeScript",
4357 FakeLspAdapter {
4358 capabilities: lsp::ServerCapabilities {
4359 completion_provider: Some(lsp::CompletionOptions {
4360 trigger_characters: Some(vec![":".to_string()]),
4361 ..Default::default()
4362 }),
4363 ..Default::default()
4364 },
4365 ..Default::default()
4366 },
4367 );
4368
4369 let (buffer, _handle) = project
4370 .update(cx, |p, cx| {
4371 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4372 })
4373 .await
4374 .unwrap();
4375
4376 let fake_server = fake_language_servers.next().await.unwrap();
4377 cx.executor().run_until_parked();
4378
4379 let text = "let a = b.fqn";
4380 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4381 let completions = project.update(cx, |project, cx| {
4382 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4383 });
4384
4385 fake_server
4386 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4387 Ok(Some(lsp::CompletionResponse::Array(vec![
4388 lsp::CompletionItem {
4389 label: "fullyQualifiedName?".into(),
4390 insert_text: Some("fully\rQualified\r\nName".into()),
4391 ..Default::default()
4392 },
4393 ])))
4394 })
4395 .next()
4396 .await;
4397 let completions = completions
4398 .await
4399 .unwrap()
4400 .into_iter()
4401 .flat_map(|response| response.completions)
4402 .collect::<Vec<_>>();
4403 assert_eq!(completions.len(), 1);
4404 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4405}
4406
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Exercises the full command-backed code-action flow:
    // codeAction request -> codeAction/resolve -> workspace/executeCommand,
    // with the server applying its edits via a `workspace/applyEdit` request.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises code-action resolution support plus a single
    // executable command, so the client must go through the resolve step.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4549
4550#[gpui::test]
4551async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4552 init_test(cx);
4553 let fs = FakeFs::new(cx.background_executor.clone());
4554 let expected_contents = "content";
4555 fs.as_fake()
4556 .insert_tree(
4557 "/root",
4558 json!({
4559 "test.txt": expected_contents
4560 }),
4561 )
4562 .await;
4563
4564 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4565
4566 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4567 let worktree = project.worktrees(cx).next().unwrap();
4568 let entry_id = worktree
4569 .read(cx)
4570 .entry_for_path(rel_path("test.txt"))
4571 .unwrap()
4572 .id;
4573 (worktree, entry_id)
4574 });
4575 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4576 let _result = project
4577 .update(cx, |project, cx| {
4578 project.rename_entry(
4579 entry_id,
4580 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4581 cx,
4582 )
4583 })
4584 .await
4585 .unwrap();
4586 worktree.read_with(cx, |worktree, _| {
4587 assert!(
4588 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4589 "Old file should have been removed"
4590 );
4591 assert!(
4592 worktree
4593 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4594 .is_some(),
4595 "Whole directory hierarchy and the new file should have been created"
4596 );
4597 });
4598 assert_eq!(
4599 worktree
4600 .update(cx, |worktree, cx| {
4601 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4602 })
4603 .await
4604 .unwrap()
4605 .text,
4606 expected_contents,
4607 "Moved file's contents should be preserved"
4608 );
4609
4610 let entry_id = worktree.read_with(cx, |worktree, _| {
4611 worktree
4612 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4613 .unwrap()
4614 .id
4615 });
4616
4617 let _result = project
4618 .update(cx, |project, cx| {
4619 project.rename_entry(
4620 entry_id,
4621 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4622 cx,
4623 )
4624 })
4625 .await
4626 .unwrap();
4627 worktree.read_with(cx, |worktree, _| {
4628 assert!(
4629 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4630 "First file should not reappear"
4631 );
4632 assert!(
4633 worktree
4634 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4635 .is_none(),
4636 "Old file should have been removed"
4637 );
4638 assert!(
4639 worktree
4640 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4641 .is_some(),
4642 "No error should have occurred after moving into existing directory"
4643 );
4644 });
4645 assert_eq!(
4646 worktree
4647 .update(cx, |worktree, cx| {
4648 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4649 })
4650 .await
4651 .unwrap()
4652 .text,
4653 expected_contents,
4654 "Moved file's contents should be preserved"
4655 );
4656}
4657
4658#[gpui::test(iterations = 10)]
4659async fn test_save_file(cx: &mut gpui::TestAppContext) {
4660 init_test(cx);
4661
4662 let fs = FakeFs::new(cx.executor());
4663 fs.insert_tree(
4664 path!("/dir"),
4665 json!({
4666 "file1": "the old contents",
4667 }),
4668 )
4669 .await;
4670
4671 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4672 let buffer = project
4673 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4674 .await
4675 .unwrap();
4676 buffer.update(cx, |buffer, cx| {
4677 assert_eq!(buffer.text(), "the old contents");
4678 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4679 });
4680
4681 project
4682 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4683 .await
4684 .unwrap();
4685
4686 let new_text = fs
4687 .load(Path::new(path!("/dir/file1")))
4688 .await
4689 .unwrap()
4690 .replace("\r\n", "\n");
4691 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4692}
4693
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Regression test: saving an untitled buffer under a name with a known
    // language should start that language's server and register the buffer.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer. With no file (and thus no language), no
    // language server applies to it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` path gives the buffer a Rust file association.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4773
4774#[gpui::test(iterations = 30)]
4775async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4776 init_test(cx);
4777
4778 let fs = FakeFs::new(cx.executor());
4779 fs.insert_tree(
4780 path!("/dir"),
4781 json!({
4782 "file1": "the original contents",
4783 }),
4784 )
4785 .await;
4786
4787 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4788 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4789 let buffer = project
4790 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4791 .await
4792 .unwrap();
4793
4794 // Change the buffer's file on disk, and then wait for the file change
4795 // to be detected by the worktree, so that the buffer starts reloading.
4796 fs.save(
4797 path!("/dir/file1").as_ref(),
4798 &"the first contents".into(),
4799 Default::default(),
4800 )
4801 .await
4802 .unwrap();
4803 worktree.next_event(cx).await;
4804
4805 // Change the buffer's file again. Depending on the random seed, the
4806 // previous file change may still be in progress.
4807 fs.save(
4808 path!("/dir/file1").as_ref(),
4809 &"the second contents".into(),
4810 Default::default(),
4811 )
4812 .await
4813 .unwrap();
4814 worktree.next_event(cx).await;
4815
4816 cx.executor().run_until_parked();
4817 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4818 buffer.read_with(cx, |buffer, _| {
4819 assert_eq!(buffer.text(), on_disk_text);
4820 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4821 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4822 });
4823}
4824
4825#[gpui::test(iterations = 30)]
4826async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4827 init_test(cx);
4828
4829 let fs = FakeFs::new(cx.executor());
4830 fs.insert_tree(
4831 path!("/dir"),
4832 json!({
4833 "file1": "the original contents",
4834 }),
4835 )
4836 .await;
4837
4838 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4839 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4840 let buffer = project
4841 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4842 .await
4843 .unwrap();
4844
4845 // Change the buffer's file on disk, and then wait for the file change
4846 // to be detected by the worktree, so that the buffer starts reloading.
4847 fs.save(
4848 path!("/dir/file1").as_ref(),
4849 &"the first contents".into(),
4850 Default::default(),
4851 )
4852 .await
4853 .unwrap();
4854 worktree.next_event(cx).await;
4855
4856 cx.executor()
4857 .spawn(cx.executor().simulate_random_delay())
4858 .await;
4859
4860 // Perform a noop edit, causing the buffer's version to increase.
4861 buffer.update(cx, |buffer, cx| {
4862 buffer.edit([(0..0, " ")], None, cx);
4863 buffer.undo(cx);
4864 });
4865
4866 cx.executor().run_until_parked();
4867 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4868 buffer.read_with(cx, |buffer, _| {
4869 let buffer_text = buffer.text();
4870 if buffer_text == on_disk_text {
4871 assert!(
4872 !buffer.is_dirty() && !buffer.has_conflict(),
4873 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4874 );
4875 }
4876 // If the file change occurred while the buffer was processing the first
4877 // change, the buffer will be in a conflicting state.
4878 else {
4879 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4880 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4881 }
4882 });
4883}
4884
4885#[gpui::test]
4886async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4887 init_test(cx);
4888
4889 let fs = FakeFs::new(cx.executor());
4890 fs.insert_tree(
4891 path!("/dir"),
4892 json!({
4893 "file1": "the old contents",
4894 }),
4895 )
4896 .await;
4897
4898 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4899 let buffer = project
4900 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4901 .await
4902 .unwrap();
4903 buffer.update(cx, |buffer, cx| {
4904 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4905 });
4906
4907 project
4908 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4909 .await
4910 .unwrap();
4911
4912 let new_text = fs
4913 .load(Path::new(path!("/dir/file1")))
4914 .await
4915 .unwrap()
4916 .replace("\r\n", "\n");
4917 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4918}
4919
4920#[gpui::test]
4921async fn test_save_as(cx: &mut gpui::TestAppContext) {
4922 init_test(cx);
4923
4924 let fs = FakeFs::new(cx.executor());
4925 fs.insert_tree("/dir", json!({})).await;
4926
4927 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4928
4929 let languages = project.update(cx, |project, _| project.languages().clone());
4930 languages.add(rust_lang());
4931
4932 let buffer = project.update(cx, |project, cx| {
4933 project.create_local_buffer("", None, false, cx)
4934 });
4935 buffer.update(cx, |buffer, cx| {
4936 buffer.edit([(0..0, "abc")], None, cx);
4937 assert!(buffer.is_dirty());
4938 assert!(!buffer.has_conflict());
4939 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4940 });
4941 project
4942 .update(cx, |project, cx| {
4943 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4944 let path = ProjectPath {
4945 worktree_id,
4946 path: rel_path("file1.rs").into(),
4947 };
4948 project.save_buffer_as(buffer.clone(), path, cx)
4949 })
4950 .await
4951 .unwrap();
4952 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4953
4954 cx.executor().run_until_parked();
4955 buffer.update(cx, |buffer, cx| {
4956 assert_eq!(
4957 buffer.file().unwrap().full_path(cx),
4958 Path::new("dir/file1.rs")
4959 );
4960 assert!(!buffer.is_dirty());
4961 assert!(!buffer.has_conflict());
4962 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4963 });
4964
4965 let opened_buffer = project
4966 .update(cx, |project, cx| {
4967 project.open_local_buffer("/dir/file1.rs", cx)
4968 })
4969 .await
4970 .unwrap();
4971 assert_eq!(opened_buffer, buffer);
4972}
4973
4974#[gpui::test]
4975async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4976 init_test(cx);
4977
4978 let fs = FakeFs::new(cx.executor());
4979 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4980
4981 fs.insert_tree(
4982 path!("/dir"),
4983 json!({
4984 "data_a.txt": "data about a"
4985 }),
4986 )
4987 .await;
4988
4989 let buffer = project
4990 .update(cx, |project, cx| {
4991 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4992 })
4993 .await
4994 .unwrap();
4995
4996 buffer.update(cx, |buffer, cx| {
4997 buffer.edit([(11..12, "b")], None, cx);
4998 });
4999
5000 // Save buffer's contents as a new file and confirm that the buffer's now
5001 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5002 // file associated with the buffer has now been updated to `data_b.txt`
5003 project
5004 .update(cx, |project, cx| {
5005 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5006 let new_path = ProjectPath {
5007 worktree_id,
5008 path: rel_path("data_b.txt").into(),
5009 };
5010
5011 project.save_buffer_as(buffer.clone(), new_path, cx)
5012 })
5013 .await
5014 .unwrap();
5015
5016 buffer.update(cx, |buffer, cx| {
5017 assert_eq!(
5018 buffer.file().unwrap().full_path(cx),
5019 Path::new("dir/data_b.txt")
5020 )
5021 });
5022
5023 // Open the original `data_a.txt` file, confirming that its contents are
5024 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5025 let original_buffer = project
5026 .update(cx, |project, cx| {
5027 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5028 })
5029 .await
5030 .unwrap();
5031
5032 original_buffer.update(cx, |buffer, cx| {
5033 assert_eq!(buffer.text(), "data about a");
5034 assert_eq!(
5035 buffer.file().unwrap().full_path(cx),
5036 Path::new("dir/data_a.txt")
5037 )
5038 });
5039}
5040
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses a real filesystem (TempTree + RealFs) to verify that renames and
    // deletions on disk preserve entry ids and buffer/file associations, and
    // that a remote replica of the worktree converges after replaying the
    // observed update stream.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including the rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // The deleted file keeps its last known path on the buffer.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5208
5209#[gpui::test(iterations = 10)]
5210async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5211 init_test(cx);
5212
5213 let fs = FakeFs::new(cx.executor());
5214 fs.insert_tree(
5215 path!("/dir"),
5216 json!({
5217 "a": {
5218 "file1": "",
5219 }
5220 }),
5221 )
5222 .await;
5223
5224 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5225 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5226 let tree_id = tree.update(cx, |tree, _| tree.id());
5227
5228 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5229 project.update(cx, |project, cx| {
5230 let tree = project.worktrees(cx).next().unwrap();
5231 tree.read(cx)
5232 .entry_for_path(rel_path(path))
5233 .unwrap_or_else(|| panic!("no entry for path {}", path))
5234 .id
5235 })
5236 };
5237
5238 let dir_id = id_for_path("a", cx);
5239 let file_id = id_for_path("a/file1", cx);
5240 let buffer = project
5241 .update(cx, |p, cx| {
5242 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5243 })
5244 .await
5245 .unwrap();
5246 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5247
5248 project
5249 .update(cx, |project, cx| {
5250 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5251 })
5252 .unwrap()
5253 .await
5254 .into_included()
5255 .unwrap();
5256 cx.executor().run_until_parked();
5257
5258 assert_eq!(id_for_path("b", cx), dir_id);
5259 assert_eq!(id_for_path("b/file1", cx), file_id);
5260 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5261}
5262
5263#[gpui::test]
5264async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5265 init_test(cx);
5266
5267 let fs = FakeFs::new(cx.executor());
5268 fs.insert_tree(
5269 "/dir",
5270 json!({
5271 "a.txt": "a-contents",
5272 "b.txt": "b-contents",
5273 }),
5274 )
5275 .await;
5276
5277 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5278
5279 // Spawn multiple tasks to open paths, repeating some paths.
5280 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5281 (
5282 p.open_local_buffer("/dir/a.txt", cx),
5283 p.open_local_buffer("/dir/b.txt", cx),
5284 p.open_local_buffer("/dir/a.txt", cx),
5285 )
5286 });
5287
5288 let buffer_a_1 = buffer_a_1.await.unwrap();
5289 let buffer_a_2 = buffer_a_2.await.unwrap();
5290 let buffer_b = buffer_b.await.unwrap();
5291 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5292 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5293
5294 // There is only one buffer per path.
5295 let buffer_a_id = buffer_a_1.entity_id();
5296 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5297
5298 // Open the same path again while it is still open.
5299 drop(buffer_a_1);
5300 let buffer_a_3 = project
5301 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5302 .await
5303 .unwrap();
5304
5305 // There's still only one buffer per path.
5306 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5307}
5308
5309#[gpui::test]
5310async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5311 init_test(cx);
5312
5313 let fs = FakeFs::new(cx.executor());
5314 fs.insert_tree(
5315 path!("/dir"),
5316 json!({
5317 "file1": "abc",
5318 "file2": "def",
5319 "file3": "ghi",
5320 }),
5321 )
5322 .await;
5323
5324 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5325
5326 let buffer1 = project
5327 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5328 .await
5329 .unwrap();
5330 let events = Arc::new(Mutex::new(Vec::new()));
5331
5332 // initially, the buffer isn't dirty.
5333 buffer1.update(cx, |buffer, cx| {
5334 cx.subscribe(&buffer1, {
5335 let events = events.clone();
5336 move |_, _, event, _| match event {
5337 BufferEvent::Operation { .. } => {}
5338 _ => events.lock().push(event.clone()),
5339 }
5340 })
5341 .detach();
5342
5343 assert!(!buffer.is_dirty());
5344 assert!(events.lock().is_empty());
5345
5346 buffer.edit([(1..2, "")], None, cx);
5347 });
5348
5349 // after the first edit, the buffer is dirty, and emits a dirtied event.
5350 buffer1.update(cx, |buffer, cx| {
5351 assert!(buffer.text() == "ac");
5352 assert!(buffer.is_dirty());
5353 assert_eq!(
5354 *events.lock(),
5355 &[
5356 language::BufferEvent::Edited,
5357 language::BufferEvent::DirtyChanged
5358 ]
5359 );
5360 events.lock().clear();
5361 buffer.did_save(
5362 buffer.version(),
5363 buffer.file().unwrap().disk_state().mtime(),
5364 cx,
5365 );
5366 });
5367
5368 // after saving, the buffer is not dirty, and emits a saved event.
5369 buffer1.update(cx, |buffer, cx| {
5370 assert!(!buffer.is_dirty());
5371 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5372 events.lock().clear();
5373
5374 buffer.edit([(1..1, "B")], None, cx);
5375 buffer.edit([(2..2, "D")], None, cx);
5376 });
5377
5378 // after editing again, the buffer is dirty, and emits another dirty event.
5379 buffer1.update(cx, |buffer, cx| {
5380 assert!(buffer.text() == "aBDc");
5381 assert!(buffer.is_dirty());
5382 assert_eq!(
5383 *events.lock(),
5384 &[
5385 language::BufferEvent::Edited,
5386 language::BufferEvent::DirtyChanged,
5387 language::BufferEvent::Edited,
5388 ],
5389 );
5390 events.lock().clear();
5391
5392 // After restoring the buffer to its previously-saved state,
5393 // the buffer is not considered dirty anymore.
5394 buffer.edit([(1..3, "")], None, cx);
5395 assert!(buffer.text() == "ac");
5396 assert!(!buffer.is_dirty());
5397 });
5398
5399 assert_eq!(
5400 *events.lock(),
5401 &[
5402 language::BufferEvent::Edited,
5403 language::BufferEvent::DirtyChanged
5404 ]
5405 );
5406
5407 // When a file is deleted, it is not considered dirty.
5408 let events = Arc::new(Mutex::new(Vec::new()));
5409 let buffer2 = project
5410 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5411 .await
5412 .unwrap();
5413 buffer2.update(cx, |_, cx| {
5414 cx.subscribe(&buffer2, {
5415 let events = events.clone();
5416 move |_, _, event, _| match event {
5417 BufferEvent::Operation { .. } => {}
5418 _ => events.lock().push(event.clone()),
5419 }
5420 })
5421 .detach();
5422 });
5423
5424 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5425 .await
5426 .unwrap();
5427 cx.executor().run_until_parked();
5428 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5429 assert_eq!(
5430 mem::take(&mut *events.lock()),
5431 &[language::BufferEvent::FileHandleChanged]
5432 );
5433
5434 // Buffer becomes dirty when edited.
5435 buffer2.update(cx, |buffer, cx| {
5436 buffer.edit([(2..3, "")], None, cx);
5437 assert_eq!(buffer.is_dirty(), true);
5438 });
5439 assert_eq!(
5440 mem::take(&mut *events.lock()),
5441 &[
5442 language::BufferEvent::Edited,
5443 language::BufferEvent::DirtyChanged
5444 ]
5445 );
5446
5447 // Buffer becomes clean again when all of its content is removed, because
5448 // the file was deleted.
5449 buffer2.update(cx, |buffer, cx| {
5450 buffer.edit([(0..2, "")], None, cx);
5451 assert_eq!(buffer.is_empty(), true);
5452 assert_eq!(buffer.is_dirty(), false);
5453 });
5454 assert_eq!(
5455 *events.lock(),
5456 &[
5457 language::BufferEvent::Edited,
5458 language::BufferEvent::DirtyChanged
5459 ]
5460 );
5461
5462 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5463 let events = Arc::new(Mutex::new(Vec::new()));
5464 let buffer3 = project
5465 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5466 .await
5467 .unwrap();
5468 buffer3.update(cx, |_, cx| {
5469 cx.subscribe(&buffer3, {
5470 let events = events.clone();
5471 move |_, _, event, _| match event {
5472 BufferEvent::Operation { .. } => {}
5473 _ => events.lock().push(event.clone()),
5474 }
5475 })
5476 .detach();
5477 });
5478
5479 buffer3.update(cx, |buffer, cx| {
5480 buffer.edit([(0..0, "x")], None, cx);
5481 });
5482 events.lock().clear();
5483 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5484 .await
5485 .unwrap();
5486 cx.executor().run_until_parked();
5487 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5488 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5489}
5490
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies reload behavior when the file changes on disk: a clean buffer
    // is patched via a diff (preserving anchors), while a dirty buffer keeps
    // its contents and is flagged as conflicting.
    init_test(cx);

    // The `ˇ` markers define offsets that are turned into anchors below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new contents give the offsets where the anchors are
    // expected to land after the diff-based reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5573
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two files with different line endings on disk: file1 is LF, file2 is CRLF.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // In memory, buffer text is always normalized to LF; the original line
    // ending style is tracked separately on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    // The LF text in the buffer is converted back to CRLF on disk.
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
5635
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Simulate a server push with two primary diagnostics ("error 1" and
    // "error 2") plus HINT-severity entries for their related locations. The
    // hint entries point back at their primary via related_information, which
    // is what allows them to be grouped together below.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries are returned in buffer order; "error 2" and its hints
    // share group 0 (with the ERROR as the primary), "error 1" and its hint
    // share group 1 (with the WARNING as the primary).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains the "error 2" primary together with both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains the "error 1" primary together with its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5895
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register file-operation filters so the fake server declares interest in
    // renames of *.rs files and of folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // The server advertises both willRename (request, may return edits) and
    // didRename (notification) capabilities for the watched paths.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off a worktree-level rename of one.rs -> three.rs. The returned
    // future is not awaited until the willRename handler below has replied.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the fake server will return from willRenameFiles; the project
    // is expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records that the willRename request actually reached the server.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The project should announce the old and new URIs before
                    // performing the rename on disk.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive a didRenameFiles
    // notification with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6032
6033#[gpui::test]
6034async fn test_rename(cx: &mut gpui::TestAppContext) {
6035 // hi
6036 init_test(cx);
6037
6038 let fs = FakeFs::new(cx.executor());
6039 fs.insert_tree(
6040 path!("/dir"),
6041 json!({
6042 "one.rs": "const ONE: usize = 1;",
6043 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6044 }),
6045 )
6046 .await;
6047
6048 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6049
6050 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6051 language_registry.add(rust_lang());
6052 let mut fake_servers = language_registry.register_fake_lsp(
6053 "Rust",
6054 FakeLspAdapter {
6055 capabilities: lsp::ServerCapabilities {
6056 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
6057 prepare_provider: Some(true),
6058 work_done_progress_options: Default::default(),
6059 })),
6060 ..Default::default()
6061 },
6062 ..Default::default()
6063 },
6064 );
6065
6066 let (buffer, _handle) = project
6067 .update(cx, |project, cx| {
6068 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
6069 })
6070 .await
6071 .unwrap();
6072
6073 let fake_server = fake_servers.next().await.unwrap();
6074 cx.executor().run_until_parked();
6075
6076 let response = project.update(cx, |project, cx| {
6077 project.prepare_rename(buffer.clone(), 7, cx)
6078 });
6079 fake_server
6080 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6081 assert_eq!(
6082 params.text_document.uri.as_str(),
6083 uri!("file:///dir/one.rs")
6084 );
6085 assert_eq!(params.position, lsp::Position::new(0, 7));
6086 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6087 lsp::Position::new(0, 6),
6088 lsp::Position::new(0, 9),
6089 ))))
6090 })
6091 .next()
6092 .await
6093 .unwrap();
6094 let response = response.await.unwrap();
6095 let PrepareRenameResponse::Success(range) = response else {
6096 panic!("{:?}", response);
6097 };
6098 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
6099 assert_eq!(range, 6..9);
6100
6101 let response = project.update(cx, |project, cx| {
6102 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
6103 });
6104 fake_server
6105 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
6106 assert_eq!(
6107 params.text_document_position.text_document.uri.as_str(),
6108 uri!("file:///dir/one.rs")
6109 );
6110 assert_eq!(
6111 params.text_document_position.position,
6112 lsp::Position::new(0, 7)
6113 );
6114 assert_eq!(params.new_name, "THREE");
6115 Ok(Some(lsp::WorkspaceEdit {
6116 changes: Some(
6117 [
6118 (
6119 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
6120 vec![lsp::TextEdit::new(
6121 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
6122 "THREE".to_string(),
6123 )],
6124 ),
6125 (
6126 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
6127 vec![
6128 lsp::TextEdit::new(
6129 lsp::Range::new(
6130 lsp::Position::new(0, 24),
6131 lsp::Position::new(0, 27),
6132 ),
6133 "THREE".to_string(),
6134 ),
6135 lsp::TextEdit::new(
6136 lsp::Range::new(
6137 lsp::Position::new(0, 35),
6138 lsp::Position::new(0, 38),
6139 ),
6140 "THREE".to_string(),
6141 ),
6142 ],
6143 ),
6144 ]
6145 .into_iter()
6146 .collect(),
6147 ),
6148 ..Default::default()
6149 }))
6150 })
6151 .next()
6152 .await
6153 .unwrap();
6154 let mut transaction = response.await.unwrap().0;
6155 assert_eq!(transaction.len(), 2);
6156 assert_eq!(
6157 transaction
6158 .remove_entry(&buffer)
6159 .unwrap()
6160 .0
6161 .update(cx, |buffer, _| buffer.text()),
6162 "const THREE: usize = 1;"
6163 );
6164 assert_eq!(
6165 transaction
6166 .into_keys()
6167 .next()
6168 .unwrap()
6169 .update(cx, |buffer, _| buffer.text()),
6170 "const TWO: usize = one::THREE + one::THREE;"
6171 );
6172}
6173
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Searching the on-disk contents finds "TWO" in two.rs and three.rs only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer, without saving, so that four.rs now also contains
    // matches for "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search must reflect the unsaved buffer contents, not the stale
    // on-disk contents of four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6250
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; every file matches the query text,
    // so any difference in results comes from the inclusion patterns alone.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6374
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; every file matches the query text,
    // so any difference in results comes from the exclusion patterns alone.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6498
6499#[gpui::test]
6500async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6501 init_test(cx);
6502
6503 let search_query = "file";
6504
6505 let fs = FakeFs::new(cx.executor());
6506 fs.insert_tree(
6507 path!("/dir"),
6508 json!({
6509 "one.rs": r#"// Rust file one"#,
6510 "one.ts": r#"// TypeScript file one"#,
6511 "two.rs": r#"// Rust file two"#,
6512 "two.ts": r#"// TypeScript file two"#,
6513 }),
6514 )
6515 .await;
6516
6517 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6518 let path_style = PathStyle::local();
6519 let _buffer = project.update(cx, |project, cx| {
6520 project.create_local_buffer("file", None, false, cx)
6521 });
6522
6523 assert_eq!(
6524 search(
6525 &project,
6526 SearchQuery::text(
6527 search_query,
6528 false,
6529 true,
6530 false,
6531 Default::default(),
6532 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6533 false,
6534 None,
6535 )
6536 .unwrap(),
6537 cx
6538 )
6539 .await
6540 .unwrap(),
6541 HashMap::from_iter([
6542 (path!("dir/one.rs").to_string(), vec![8..12]),
6543 (path!("dir/one.ts").to_string(), vec![14..18]),
6544 (path!("dir/two.rs").to_string(), vec![8..12]),
6545 (path!("dir/two.ts").to_string(), vec![14..18]),
6546 ]),
6547 "If no exclusions match, all files should be returned"
6548 );
6549
6550 assert_eq!(
6551 search(
6552 &project,
6553 SearchQuery::text(
6554 search_query,
6555 false,
6556 true,
6557 false,
6558 Default::default(),
6559 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6560 false,
6561 None,
6562 )
6563 .unwrap(),
6564 cx
6565 )
6566 .await
6567 .unwrap(),
6568 HashMap::from_iter([
6569 (path!("dir/one.ts").to_string(), vec![14..18]),
6570 (path!("dir/two.ts").to_string(), vec![14..18]),
6571 ]),
6572 "Rust exclusion search should give only TypeScript files"
6573 );
6574
6575 assert_eq!(
6576 search(
6577 &project,
6578 SearchQuery::text(
6579 search_query,
6580 false,
6581 true,
6582 false,
6583 Default::default(),
6584 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6585 false,
6586 None,
6587 )
6588 .unwrap(),
6589 cx
6590 )
6591 .await
6592 .unwrap(),
6593 HashMap::from_iter([
6594 (path!("dir/one.rs").to_string(), vec![8..12]),
6595 (path!("dir/two.rs").to_string(), vec![8..12]),
6596 ]),
6597 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6598 );
6599
6600 assert!(
6601 search(
6602 &project,
6603 SearchQuery::text(
6604 search_query,
6605 false,
6606 true,
6607 false,
6608 Default::default(),
6609 PathMatcher::new(
6610 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6611 PathStyle::local(),
6612 )
6613 .unwrap(),
6614 false,
6615 None,
6616 )
6617 .unwrap(),
6618 cx
6619 )
6620 .await
6621 .unwrap()
6622 .is_empty(),
6623 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6624 );
6625}
6626
6627#[gpui::test]
6628async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6629 init_test(cx);
6630
6631 let search_query = "file";
6632
6633 let fs = FakeFs::new(cx.executor());
6634 fs.insert_tree(
6635 path!("/dir"),
6636 json!({
6637 "one.rs": r#"// Rust file one"#,
6638 "one.ts": r#"// TypeScript file one"#,
6639 "two.rs": r#"// Rust file two"#,
6640 "two.ts": r#"// TypeScript file two"#,
6641 }),
6642 )
6643 .await;
6644 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6645 assert!(
6646 search(
6647 &project,
6648 SearchQuery::text(
6649 search_query,
6650 false,
6651 true,
6652 false,
6653 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6654 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6655 false,
6656 None,
6657 )
6658 .unwrap(),
6659 cx
6660 )
6661 .await
6662 .unwrap()
6663 .is_empty(),
6664 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6665 );
6666
6667 assert!(
6668 search(
6669 &project,
6670 SearchQuery::text(
6671 search_query,
6672 false,
6673 true,
6674 false,
6675 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6676 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6677 false,
6678 None,
6679 )
6680 .unwrap(),
6681 cx
6682 )
6683 .await
6684 .unwrap()
6685 .is_empty(),
6686 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6687 );
6688
6689 assert!(
6690 search(
6691 &project,
6692 SearchQuery::text(
6693 search_query,
6694 false,
6695 true,
6696 false,
6697 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6698 .unwrap(),
6699 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6700 .unwrap(),
6701 false,
6702 None,
6703 )
6704 .unwrap(),
6705 cx
6706 )
6707 .await
6708 .unwrap()
6709 .is_empty(),
6710 "Non-matching inclusions and exclusions should not change that."
6711 );
6712
6713 assert_eq!(
6714 search(
6715 &project,
6716 SearchQuery::text(
6717 search_query,
6718 false,
6719 true,
6720 false,
6721 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6722 .unwrap(),
6723 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6724 .unwrap(),
6725 false,
6726 None,
6727 )
6728 .unwrap(),
6729 cx
6730 )
6731 .await
6732 .unwrap(),
6733 HashMap::from_iter([
6734 (path!("dir/one.ts").to_string(), vec![14..18]),
6735 (path!("dir/two.ts").to_string(), vec![14..18]),
6736 ]),
6737 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6738 );
6739}
6740
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identically-named files, so that inclusion patterns
    // prefixed with a worktree name can discriminate between them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A pattern without a worktree prefix matches in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6839
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree whose `.gitignore` ignores `target/` everywhere and
    // `node_modules/` at the root; every file contains the substring "key".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (fourth argument `false`): files under gitignored
    // directories are skipped entirely.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created before each subsequent search —
    // presumably to start from a clean worktree scan; confirm whether reusing
    // the first project would behave identically.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // With the fourth argument `true`, matches inside `target/` and
    // `node_modules/` are reported as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions may point inside an ignored directory, and exclusions are
    // still applied on top of them.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6964
6965#[gpui::test]
6966async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6967 init_test(cx);
6968
6969 let fs = FakeFs::new(cx.executor());
6970 fs.insert_tree(
6971 path!("/dir"),
6972 json!({
6973 "one.rs": "// ПРИВЕТ? привет!",
6974 "two.rs": "// ПРИВЕТ.",
6975 "three.rs": "// привет",
6976 }),
6977 )
6978 .await;
6979 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6980 let unicode_case_sensitive_query = SearchQuery::text(
6981 "привет",
6982 false,
6983 true,
6984 false,
6985 Default::default(),
6986 Default::default(),
6987 false,
6988 None,
6989 );
6990 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6991 assert_eq!(
6992 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6993 .await
6994 .unwrap(),
6995 HashMap::from_iter([
6996 (path!("dir/one.rs").to_string(), vec![17..29]),
6997 (path!("dir/three.rs").to_string(), vec![3..15]),
6998 ])
6999 );
7000
7001 let unicode_case_insensitive_query = SearchQuery::text(
7002 "привет",
7003 false,
7004 false,
7005 false,
7006 Default::default(),
7007 Default::default(),
7008 false,
7009 None,
7010 );
7011 assert_matches!(
7012 unicode_case_insensitive_query,
7013 Ok(SearchQuery::Regex { .. })
7014 );
7015 assert_eq!(
7016 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7017 .await
7018 .unwrap(),
7019 HashMap::from_iter([
7020 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7021 (path!("dir/two.rs").to_string(), vec![3..15]),
7022 (path!("dir/three.rs").to_string(), vec![3..15]),
7023 ])
7024 );
7025
7026 assert_eq!(
7027 search(
7028 &project,
7029 SearchQuery::text(
7030 "привет.",
7031 false,
7032 false,
7033 false,
7034 Default::default(),
7035 Default::default(),
7036 false,
7037 None,
7038 )
7039 .unwrap(),
7040 cx
7041 )
7042 .await
7043 .unwrap(),
7044 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7045 );
7046}
7047
7048#[gpui::test]
7049async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7050 init_test(cx);
7051
7052 let fs = FakeFs::new(cx.executor());
7053 fs.insert_tree(
7054 "/one/two",
7055 json!({
7056 "three": {
7057 "a.txt": "",
7058 "four": {}
7059 },
7060 "c.rs": ""
7061 }),
7062 )
7063 .await;
7064
7065 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7066 project
7067 .update(cx, |project, cx| {
7068 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7069 project.create_entry((id, rel_path("b..")), true, cx)
7070 })
7071 .await
7072 .unwrap()
7073 .into_included()
7074 .unwrap();
7075
7076 assert_eq!(
7077 fs.paths(true),
7078 vec![
7079 PathBuf::from(path!("/")),
7080 PathBuf::from(path!("/one")),
7081 PathBuf::from(path!("/one/two")),
7082 PathBuf::from(path!("/one/two/c.rs")),
7083 PathBuf::from(path!("/one/two/three")),
7084 PathBuf::from(path!("/one/two/three/a.txt")),
7085 PathBuf::from(path!("/one/two/three/b..")),
7086 PathBuf::from(path!("/one/two/three/four")),
7087 ]
7088 );
7089}
7090
7091#[gpui::test]
7092async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7093 init_test(cx);
7094
7095 let fs = FakeFs::new(cx.executor());
7096 fs.insert_tree(
7097 path!("/dir"),
7098 json!({
7099 "a.tsx": "a",
7100 }),
7101 )
7102 .await;
7103
7104 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7105
7106 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7107 language_registry.add(tsx_lang());
7108 let language_server_names = [
7109 "TypeScriptServer",
7110 "TailwindServer",
7111 "ESLintServer",
7112 "NoHoverCapabilitiesServer",
7113 ];
7114 let mut language_servers = [
7115 language_registry.register_fake_lsp(
7116 "tsx",
7117 FakeLspAdapter {
7118 name: language_server_names[0],
7119 capabilities: lsp::ServerCapabilities {
7120 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7121 ..lsp::ServerCapabilities::default()
7122 },
7123 ..FakeLspAdapter::default()
7124 },
7125 ),
7126 language_registry.register_fake_lsp(
7127 "tsx",
7128 FakeLspAdapter {
7129 name: language_server_names[1],
7130 capabilities: lsp::ServerCapabilities {
7131 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7132 ..lsp::ServerCapabilities::default()
7133 },
7134 ..FakeLspAdapter::default()
7135 },
7136 ),
7137 language_registry.register_fake_lsp(
7138 "tsx",
7139 FakeLspAdapter {
7140 name: language_server_names[2],
7141 capabilities: lsp::ServerCapabilities {
7142 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7143 ..lsp::ServerCapabilities::default()
7144 },
7145 ..FakeLspAdapter::default()
7146 },
7147 ),
7148 language_registry.register_fake_lsp(
7149 "tsx",
7150 FakeLspAdapter {
7151 name: language_server_names[3],
7152 capabilities: lsp::ServerCapabilities {
7153 hover_provider: None,
7154 ..lsp::ServerCapabilities::default()
7155 },
7156 ..FakeLspAdapter::default()
7157 },
7158 ),
7159 ];
7160
7161 let (buffer, _handle) = project
7162 .update(cx, |p, cx| {
7163 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7164 })
7165 .await
7166 .unwrap();
7167 cx.executor().run_until_parked();
7168
7169 let mut servers_with_hover_requests = HashMap::default();
7170 for i in 0..language_server_names.len() {
7171 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7172 panic!(
7173 "Failed to get language server #{i} with name {}",
7174 &language_server_names[i]
7175 )
7176 });
7177 let new_server_name = new_server.server.name();
7178 assert!(
7179 !servers_with_hover_requests.contains_key(&new_server_name),
7180 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7181 );
7182 match new_server_name.as_ref() {
7183 "TailwindServer" | "TypeScriptServer" => {
7184 servers_with_hover_requests.insert(
7185 new_server_name.clone(),
7186 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7187 move |_, _| {
7188 let name = new_server_name.clone();
7189 async move {
7190 Ok(Some(lsp::Hover {
7191 contents: lsp::HoverContents::Scalar(
7192 lsp::MarkedString::String(format!("{name} hover")),
7193 ),
7194 range: None,
7195 }))
7196 }
7197 },
7198 ),
7199 );
7200 }
7201 "ESLintServer" => {
7202 servers_with_hover_requests.insert(
7203 new_server_name,
7204 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7205 |_, _| async move { Ok(None) },
7206 ),
7207 );
7208 }
7209 "NoHoverCapabilitiesServer" => {
7210 let _never_handled = new_server
7211 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7212 panic!(
7213 "Should not call for hovers server with no corresponding capabilities"
7214 )
7215 });
7216 }
7217 unexpected => panic!("Unexpected server name: {unexpected}"),
7218 }
7219 }
7220
7221 let hover_task = project.update(cx, |project, cx| {
7222 project.hover(&buffer, Point::new(0, 0), cx)
7223 });
7224 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7225 |mut hover_request| async move {
7226 hover_request
7227 .next()
7228 .await
7229 .expect("All hover requests should have been triggered")
7230 },
7231 ))
7232 .await;
7233 assert_eq!(
7234 vec!["TailwindServer hover", "TypeScriptServer hover"],
7235 hover_task
7236 .await
7237 .into_iter()
7238 .flatten()
7239 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7240 .sorted()
7241 .collect::<Vec<_>>(),
7242 "Should receive hover responses from all related servers with hover capabilities"
7243 );
7244}
7245
7246#[gpui::test]
7247async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7248 init_test(cx);
7249
7250 let fs = FakeFs::new(cx.executor());
7251 fs.insert_tree(
7252 path!("/dir"),
7253 json!({
7254 "a.ts": "a",
7255 }),
7256 )
7257 .await;
7258
7259 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7260
7261 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7262 language_registry.add(typescript_lang());
7263 let mut fake_language_servers = language_registry.register_fake_lsp(
7264 "TypeScript",
7265 FakeLspAdapter {
7266 capabilities: lsp::ServerCapabilities {
7267 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7268 ..lsp::ServerCapabilities::default()
7269 },
7270 ..FakeLspAdapter::default()
7271 },
7272 );
7273
7274 let (buffer, _handle) = project
7275 .update(cx, |p, cx| {
7276 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7277 })
7278 .await
7279 .unwrap();
7280 cx.executor().run_until_parked();
7281
7282 let fake_server = fake_language_servers
7283 .next()
7284 .await
7285 .expect("failed to get the language server");
7286
7287 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7288 move |_, _| async move {
7289 Ok(Some(lsp::Hover {
7290 contents: lsp::HoverContents::Array(vec![
7291 lsp::MarkedString::String("".to_string()),
7292 lsp::MarkedString::String(" ".to_string()),
7293 lsp::MarkedString::String("\n\n\n".to_string()),
7294 ]),
7295 range: None,
7296 }))
7297 },
7298 );
7299
7300 let hover_task = project.update(cx, |project, cx| {
7301 project.hover(&buffer, Point::new(0, 0), cx)
7302 });
7303 let () = request_handled
7304 .next()
7305 .await
7306 .expect("All hover requests should have been triggered");
7307 assert_eq!(
7308 Vec::<String>::new(),
7309 hover_task
7310 .await
7311 .into_iter()
7312 .flatten()
7313 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7314 .sorted()
7315 .collect::<Vec<_>>(),
7316 "Empty hover parts should be ignored"
7317 );
7318}
7319
7320#[gpui::test]
7321async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7322 init_test(cx);
7323
7324 let fs = FakeFs::new(cx.executor());
7325 fs.insert_tree(
7326 path!("/dir"),
7327 json!({
7328 "a.ts": "a",
7329 }),
7330 )
7331 .await;
7332
7333 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7334
7335 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7336 language_registry.add(typescript_lang());
7337 let mut fake_language_servers = language_registry.register_fake_lsp(
7338 "TypeScript",
7339 FakeLspAdapter {
7340 capabilities: lsp::ServerCapabilities {
7341 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7342 ..lsp::ServerCapabilities::default()
7343 },
7344 ..FakeLspAdapter::default()
7345 },
7346 );
7347
7348 let (buffer, _handle) = project
7349 .update(cx, |p, cx| {
7350 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7351 })
7352 .await
7353 .unwrap();
7354 cx.executor().run_until_parked();
7355
7356 let fake_server = fake_language_servers
7357 .next()
7358 .await
7359 .expect("failed to get the language server");
7360
7361 let mut request_handled = fake_server
7362 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7363 Ok(Some(vec![
7364 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7365 title: "organize imports".to_string(),
7366 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7367 ..lsp::CodeAction::default()
7368 }),
7369 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7370 title: "fix code".to_string(),
7371 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7372 ..lsp::CodeAction::default()
7373 }),
7374 ]))
7375 });
7376
7377 let code_actions_task = project.update(cx, |project, cx| {
7378 project.code_actions(
7379 &buffer,
7380 0..buffer.read(cx).len(),
7381 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7382 cx,
7383 )
7384 });
7385
7386 let () = request_handled
7387 .next()
7388 .await
7389 .expect("The code action request should have been triggered");
7390
7391 let code_actions = code_actions_task.await.unwrap().unwrap();
7392 assert_eq!(code_actions.len(), 1);
7393 assert_eq!(
7394 code_actions[0].lsp_action.action_kind(),
7395 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7396 );
7397}
7398
7399#[gpui::test]
7400async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7401 init_test(cx);
7402
7403 let fs = FakeFs::new(cx.executor());
7404 fs.insert_tree(
7405 path!("/dir"),
7406 json!({
7407 "a.tsx": "a",
7408 }),
7409 )
7410 .await;
7411
7412 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7413
7414 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7415 language_registry.add(tsx_lang());
7416 let language_server_names = [
7417 "TypeScriptServer",
7418 "TailwindServer",
7419 "ESLintServer",
7420 "NoActionsCapabilitiesServer",
7421 ];
7422
7423 let mut language_server_rxs = [
7424 language_registry.register_fake_lsp(
7425 "tsx",
7426 FakeLspAdapter {
7427 name: language_server_names[0],
7428 capabilities: lsp::ServerCapabilities {
7429 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7430 ..lsp::ServerCapabilities::default()
7431 },
7432 ..FakeLspAdapter::default()
7433 },
7434 ),
7435 language_registry.register_fake_lsp(
7436 "tsx",
7437 FakeLspAdapter {
7438 name: language_server_names[1],
7439 capabilities: lsp::ServerCapabilities {
7440 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7441 ..lsp::ServerCapabilities::default()
7442 },
7443 ..FakeLspAdapter::default()
7444 },
7445 ),
7446 language_registry.register_fake_lsp(
7447 "tsx",
7448 FakeLspAdapter {
7449 name: language_server_names[2],
7450 capabilities: lsp::ServerCapabilities {
7451 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7452 ..lsp::ServerCapabilities::default()
7453 },
7454 ..FakeLspAdapter::default()
7455 },
7456 ),
7457 language_registry.register_fake_lsp(
7458 "tsx",
7459 FakeLspAdapter {
7460 name: language_server_names[3],
7461 capabilities: lsp::ServerCapabilities {
7462 code_action_provider: None,
7463 ..lsp::ServerCapabilities::default()
7464 },
7465 ..FakeLspAdapter::default()
7466 },
7467 ),
7468 ];
7469
7470 let (buffer, _handle) = project
7471 .update(cx, |p, cx| {
7472 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7473 })
7474 .await
7475 .unwrap();
7476 cx.executor().run_until_parked();
7477
7478 let mut servers_with_actions_requests = HashMap::default();
7479 for i in 0..language_server_names.len() {
7480 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7481 panic!(
7482 "Failed to get language server #{i} with name {}",
7483 &language_server_names[i]
7484 )
7485 });
7486 let new_server_name = new_server.server.name();
7487
7488 assert!(
7489 !servers_with_actions_requests.contains_key(&new_server_name),
7490 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7491 );
7492 match new_server_name.0.as_ref() {
7493 "TailwindServer" | "TypeScriptServer" => {
7494 servers_with_actions_requests.insert(
7495 new_server_name.clone(),
7496 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7497 move |_, _| {
7498 let name = new_server_name.clone();
7499 async move {
7500 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7501 lsp::CodeAction {
7502 title: format!("{name} code action"),
7503 ..lsp::CodeAction::default()
7504 },
7505 )]))
7506 }
7507 },
7508 ),
7509 );
7510 }
7511 "ESLintServer" => {
7512 servers_with_actions_requests.insert(
7513 new_server_name,
7514 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7515 |_, _| async move { Ok(None) },
7516 ),
7517 );
7518 }
7519 "NoActionsCapabilitiesServer" => {
7520 let _never_handled = new_server
7521 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7522 panic!(
7523 "Should not call for code actions server with no corresponding capabilities"
7524 )
7525 });
7526 }
7527 unexpected => panic!("Unexpected server name: {unexpected}"),
7528 }
7529 }
7530
7531 let code_actions_task = project.update(cx, |project, cx| {
7532 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7533 });
7534
7535 // cx.run_until_parked();
7536 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7537 |mut code_actions_request| async move {
7538 code_actions_request
7539 .next()
7540 .await
7541 .expect("All code actions requests should have been triggered")
7542 },
7543 ))
7544 .await;
7545 assert_eq!(
7546 vec!["TailwindServer code action", "TypeScriptServer code action"],
7547 code_actions_task
7548 .await
7549 .unwrap()
7550 .unwrap()
7551 .into_iter()
7552 .map(|code_action| code_action.lsp_action.title().to_owned())
7553 .sorted()
7554 .collect::<Vec<_>>(),
7555 "Should receive code actions responses from all related servers with hover capabilities"
7556 );
7557}
7558
7559#[gpui::test]
7560async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7561 init_test(cx);
7562
7563 let fs = FakeFs::new(cx.executor());
7564 fs.insert_tree(
7565 "/dir",
7566 json!({
7567 "a.rs": "let a = 1;",
7568 "b.rs": "let b = 2;",
7569 "c.rs": "let c = 2;",
7570 }),
7571 )
7572 .await;
7573
7574 let project = Project::test(
7575 fs,
7576 [
7577 "/dir/a.rs".as_ref(),
7578 "/dir/b.rs".as_ref(),
7579 "/dir/c.rs".as_ref(),
7580 ],
7581 cx,
7582 )
7583 .await;
7584
7585 // check the initial state and get the worktrees
7586 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7587 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7588 assert_eq!(worktrees.len(), 3);
7589
7590 let worktree_a = worktrees[0].read(cx);
7591 let worktree_b = worktrees[1].read(cx);
7592 let worktree_c = worktrees[2].read(cx);
7593
7594 // check they start in the right order
7595 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7596 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7597 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7598
7599 (
7600 worktrees[0].clone(),
7601 worktrees[1].clone(),
7602 worktrees[2].clone(),
7603 )
7604 });
7605
7606 // move first worktree to after the second
7607 // [a, b, c] -> [b, a, c]
7608 project
7609 .update(cx, |project, cx| {
7610 let first = worktree_a.read(cx);
7611 let second = worktree_b.read(cx);
7612 project.move_worktree(first.id(), second.id(), cx)
7613 })
7614 .expect("moving first after second");
7615
7616 // check the state after moving
7617 project.update(cx, |project, cx| {
7618 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7619 assert_eq!(worktrees.len(), 3);
7620
7621 let first = worktrees[0].read(cx);
7622 let second = worktrees[1].read(cx);
7623 let third = worktrees[2].read(cx);
7624
7625 // check they are now in the right order
7626 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7627 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7628 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7629 });
7630
7631 // move the second worktree to before the first
7632 // [b, a, c] -> [a, b, c]
7633 project
7634 .update(cx, |project, cx| {
7635 let second = worktree_a.read(cx);
7636 let first = worktree_b.read(cx);
7637 project.move_worktree(first.id(), second.id(), cx)
7638 })
7639 .expect("moving second before first");
7640
7641 // check the state after moving
7642 project.update(cx, |project, cx| {
7643 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7644 assert_eq!(worktrees.len(), 3);
7645
7646 let first = worktrees[0].read(cx);
7647 let second = worktrees[1].read(cx);
7648 let third = worktrees[2].read(cx);
7649
7650 // check they are now in the right order
7651 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7652 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7653 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7654 });
7655
7656 // move the second worktree to after the third
7657 // [a, b, c] -> [a, c, b]
7658 project
7659 .update(cx, |project, cx| {
7660 let second = worktree_b.read(cx);
7661 let third = worktree_c.read(cx);
7662 project.move_worktree(second.id(), third.id(), cx)
7663 })
7664 .expect("moving second after third");
7665
7666 // check the state after moving
7667 project.update(cx, |project, cx| {
7668 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7669 assert_eq!(worktrees.len(), 3);
7670
7671 let first = worktrees[0].read(cx);
7672 let second = worktrees[1].read(cx);
7673 let third = worktrees[2].read(cx);
7674
7675 // check they are now in the right order
7676 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7677 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7678 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7679 });
7680
7681 // move the third worktree to before the second
7682 // [a, c, b] -> [a, b, c]
7683 project
7684 .update(cx, |project, cx| {
7685 let third = worktree_c.read(cx);
7686 let second = worktree_b.read(cx);
7687 project.move_worktree(third.id(), second.id(), cx)
7688 })
7689 .expect("moving third before second");
7690
7691 // check the state after moving
7692 project.update(cx, |project, cx| {
7693 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7694 assert_eq!(worktrees.len(), 3);
7695
7696 let first = worktrees[0].read(cx);
7697 let second = worktrees[1].read(cx);
7698 let third = worktrees[2].read(cx);
7699
7700 // check they are now in the right order
7701 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7702 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7703 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7704 });
7705
7706 // move the first worktree to after the third
7707 // [a, b, c] -> [b, c, a]
7708 project
7709 .update(cx, |project, cx| {
7710 let first = worktree_a.read(cx);
7711 let third = worktree_c.read(cx);
7712 project.move_worktree(first.id(), third.id(), cx)
7713 })
7714 .expect("moving first after third");
7715
7716 // check the state after moving
7717 project.update(cx, |project, cx| {
7718 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7719 assert_eq!(worktrees.len(), 3);
7720
7721 let first = worktrees[0].read(cx);
7722 let second = worktrees[1].read(cx);
7723 let third = worktrees[2].read(cx);
7724
7725 // check they are now in the right order
7726 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7727 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7728 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7729 });
7730
7731 // move the third worktree to before the first
7732 // [b, c, a] -> [a, b, c]
7733 project
7734 .update(cx, |project, cx| {
7735 let third = worktree_a.read(cx);
7736 let first = worktree_b.read(cx);
7737 project.move_worktree(third.id(), first.id(), cx)
7738 })
7739 .expect("moving third before first");
7740
7741 // check the state after moving
7742 project.update(cx, |project, cx| {
7743 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7744 assert_eq!(worktrees.len(), 3);
7745
7746 let first = worktrees[0].read(cx);
7747 let second = worktrees[1].read(cx);
7748 let third = worktrees[2].read(cx);
7749
7750 // check they are now in the right order
7751 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7752 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7753 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7754 });
7755}
7756
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
    fn main() {
        println!("hello world");
    }
    "#
    .unindent();
    // Working-copy version: one added comment line and one modified line
    // relative to the index.
    let file_contents = r#"
    // print goodbye
    fn main() {
        println!("goodbye world");
    }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Buffer vs. index: one added hunk (the comment) and one modified hunk
    // (the println line). Ranges are buffer row ranges.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Stage a version that contains the comment but drops the println, so
    // only the println line remains different from the index.
    let staged_contents = r#"
    // print goodbye
    fn main() {
    }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    // The diff recomputes against the new index: a single added hunk remains.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7850
// Verifies uncommitted diffs (working copy vs. HEAD) for buffers: hunks are
// classified as staged/unstaged by comparing against the index, the diff's
// base text picks up the buffer's language, the diff reacts to HEAD changes,
// and a buffer for a deleted file shows a deletion hunk whose secondary
// (staged) state follows the index.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain a file (deletion.rs) that is absent
    // from the working tree, i.e. an uncommitted deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should be parsed with the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                // The added comment is not in the index, so it is unstaged...
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // ...while the println change matches the index, so it's staged.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk loses its secondary hunk: it is now fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8034
// Exercises interactive staging of individual hunks against a FakeFs git
// repository: hunks show optimistic "pending" secondary states while the
// index write is in flight, the diff emits HunksStagedOrUnstaged /
// DiffChanged events, a failed index write rolls the hunk back to unstaged,
// and multiple concurrently-issued staging operations all take effect.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index are identical, so all three hunks start unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                // Pending: the index write hasn't completed yet.
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic pending state, even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8380
// Like test_staging_hunks, but exercises delayed filesystem events for index
// writes: events are paused, additional hunks are staged while earlier
// writes' events are still buffered, and events are flushed incrementally.
// All staged hunks must still settle to NoSecondaryHunk. The explicit seeds
// pin particular scheduler orderings (presumably past regressions — confirm
// against the commit that added them).
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both writes are now in flight, so both hunks are pending.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8574
// Randomized model-based test: repeatedly stage or unstage a random hunk
// (tracking the expected secondary status in a local model), interleaved
// with random executor delays, then verify that once all pending index
// writes settle the diff's hunk states match the model. The number of
// operations is tunable via the `OPERATIONS` env var (default 20).
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the working copy, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model: its secondary_status fields are updated
    // to the value each operation is expected to produce.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let the scheduler interleave the pending index writes randomly.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending transition should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8694
8695#[gpui::test]
8696async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8697 init_test(cx);
8698
8699 let committed_contents = r#"
8700 fn main() {
8701 println!("hello from HEAD");
8702 }
8703 "#
8704 .unindent();
8705 let file_contents = r#"
8706 fn main() {
8707 println!("hello from the working copy");
8708 }
8709 "#
8710 .unindent();
8711
8712 let fs = FakeFs::new(cx.background_executor.clone());
8713 fs.insert_tree(
8714 "/dir",
8715 json!({
8716 ".git": {},
8717 "src": {
8718 "main.rs": file_contents,
8719 }
8720 }),
8721 )
8722 .await;
8723
8724 fs.set_head_for_repo(
8725 Path::new("/dir/.git"),
8726 &[("src/main.rs", committed_contents.clone())],
8727 "deadbeef",
8728 );
8729 fs.set_index_for_repo(
8730 Path::new("/dir/.git"),
8731 &[("src/main.rs", committed_contents.clone())],
8732 );
8733
8734 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8735
8736 let buffer = project
8737 .update(cx, |project, cx| {
8738 project.open_local_buffer("/dir/src/main.rs", cx)
8739 })
8740 .await
8741 .unwrap();
8742 let uncommitted_diff = project
8743 .update(cx, |project, cx| {
8744 project.open_uncommitted_diff(buffer.clone(), cx)
8745 })
8746 .await
8747 .unwrap();
8748
8749 cx.run_until_parked();
8750 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8751 let snapshot = buffer.read(cx).snapshot();
8752 assert_hunks(
8753 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8754 &snapshot,
8755 &uncommitted_diff.base_text_string(cx).unwrap(),
8756 &[(
8757 1..2,
8758 " println!(\"hello from HEAD\");\n",
8759 " println!(\"hello from the working copy\");\n",
8760 DiffHunkStatus {
8761 kind: DiffHunkStatusKind::Modified,
8762 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8763 },
8764 )],
8765 );
8766 });
8767}
8768
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must preserve the file's executable bit in
// the git index (mode 100755) rather than rewriting the entry as a regular
// file (100644). Runs against a real git repository on the real filesystem
// and checks the outcome with the `git` CLI.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify its contents so
    // there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which rewrites the file's index entry.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    // A mode change would surface as a "new mode" line in the staged diff.
    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8853
// Verifies GitStore::repository_and_path_for_project_path: a file outside
// any repository maps to None, files resolve to their innermost containing
// repository (the nested repo under `deps` claims its own files instead of
// the outer repo), and deleting a `.git` directory drops the association.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer `.git` directory should drop the mapping for its files.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8943
// Verifies handling of the user's home directory as a git repository: when
// the worktree is only a subfolder of home, the surrounding home repository
// is not associated with its files; when the worktree root is the home
// directory itself, the repository is used.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: worktree rooted at a subfolder of home.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: worktree rooted at the home directory itself.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9001
// End-to-end git status tracking against a real repository on the real
// filesystem: verifies the statuses observed at startup (modified /
// untracked / deleted), after modifying a previously-unchanged file, and
// after committing everything and then deleting one tracked and one
// untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a file that was unchanged at startup.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the current state, then delete a tracked and an untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9131
// NOTE: currently #[ignore]d. Verifies status postprocessing: a file that is
// deleted in the index but present in HEAD and the working copy yields a
// combined index/worktree status (Deleted + Added), and a nested git
// repository's directory is excluded from the outer repository's statuses.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer ("project") repository, not the nested `sub` repo.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9195
9196#[track_caller]
9197/// We merge lhs into rhs.
9198fn merge_pending_ops_snapshots(
9199 source: Vec<pending_op::PendingOps>,
9200 mut target: Vec<pending_op::PendingOps>,
9201) -> Vec<pending_op::PendingOps> {
9202 for s_ops in source {
9203 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9204 if ops.repo_path == s_ops.repo_path {
9205 Some(idx)
9206 } else {
9207 None
9208 }
9209 }) {
9210 let t_ops = &mut target[idx];
9211 for s_op in s_ops.ops {
9212 if let Some(op_idx) = t_ops
9213 .ops
9214 .iter()
9215 .zip(0..)
9216 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9217 {
9218 let t_op = &mut t_ops.ops[op_idx];
9219 match (s_op.job_status, t_op.job_status) {
9220 (pending_op::JobStatus::Running, _) => {}
9221 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9222 (s_st, t_st) if s_st == t_st => {}
9223 _ => unreachable!(),
9224 }
9225 } else {
9226 t_ops.ops.push(s_op);
9227 }
9228 }
9229 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9230 } else {
9231 target.push(s_ops);
9232 }
9233 }
9234 target
9235}
9236
/// Exercises the pending-ops bookkeeping for repeated stage/unstage cycles on
/// a single untracked file: each operation should appear as `Running` while
/// its task is in flight, transition to `Finished` once awaited, and the full
/// sequence of ops (ids 1..=5, alternating Staged/Unstaged) should be
/// observable via `PendingOpsChanged` events.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot, merged across events, so
    // the final history of ops can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; incremented by each assert_stage call.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op is Running before the
    // task completes and Finished after it is awaited.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op must be visible as Running while the task is in flight.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After awaiting the task, the same op must be Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending on a stage (ids 1..=5).
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history must show all five ops, each Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last operation: the file is staged
    // (added in the index, unmodified in the worktree).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9397
/// Verifies that when two staging operations for the same path are issued
/// back to back, the first (detached) one is superseded and recorded as
/// `Skipped`, while the second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot, merged across events, so
    // the final history of ops can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request is detached — never awaited directly.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path; awaiting it (with a timeout as
    // a safety net) should supersede the first request.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 actually ran (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Net effect: the file ends up staged.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9503
/// Exercises pending-ops bookkeeping for bulk operations: a targeted stage of
/// one file followed by `stage_all` and `unstage_all` over two untracked
/// files. Both paths should record a Staged then an Unstaged op, each
/// Finished, and the final cached status should show both files untracked
/// again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot, merged across events, so
    // the per-path op history can be asserted at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one file explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: staged (op 1), then unstaged by unstage_all (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (op 1), then unstaged by unstage_all (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the final unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9632
/// Opens a project rooted at a subfolder of a git repository and verifies
/// that the repository is still discovered at its true root, that statuses
/// are reported in repo-relative paths, and that clearing the repo's status
/// is picked up on rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the project root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The project is rooted two levels below the repository's work directory.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repo root is resolved above the project root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the repo's reported status and confirm a rescan drops both entries.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9712
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is always false, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
/// Simulates a conflicted cherry-pick on a real repository and checks that the
/// repository model reports the conflict for a.txt, then clears it once the
/// cherry-pick is (manually) resolved and committed.
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem is used, so the test executor must be allowed to park.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Build a branch that capitalizes a.txt, then modify the same file on main
    // so that cherry-picking the branch commit conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The conflicted cherry-pick must have left CHERRY_PICK_HEAD behind.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository model should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, the conflict list should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9795
/// Verifies that rewriting .gitignore flips which files are treated as
/// ignored, and that a newly non-ignored file picks up its index status
/// (Added) once staged.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed; b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignore states are now swapped: a.xml ignored, b.txt staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9863
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already opened.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates the
/// repository's `work_directory_abs_path` while preserving the per-file
/// statuses (repo-relative paths are unchanged by the rename).
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem is used, so the test executor must be allowed to park.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Baseline: work dir is project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repo follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9945
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already opened. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// End-to-end check of file-status tracking against a real repository:
/// initial statuses after startup, reactions to working-copy edits, commits,
/// resets/stashes, deletions, .gitignore updates, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem is used, so the test executor must be allowed to park.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files, and also start ignoring f.txt via an updated .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new file inside a nested directory; it should show as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the outer directory; the untracked status must follow the file
    // to its new repo-relative path.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10170
10171#[gpui::test]
10172#[ignore]
10173async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
10174 init_test(cx);
10175 cx.executor().allow_parking();
10176
10177 const IGNORE_RULE: &str = "**/target";
10178
10179 let root = TempTree::new(json!({
10180 "project": {
10181 "src": {
10182 "main.rs": "fn main() {}"
10183 },
10184 "target": {
10185 "debug": {
10186 "important_text.txt": "important text",
10187 },
10188 },
10189 ".gitignore": IGNORE_RULE
10190 },
10191
10192 }));
10193 let root_path = root.path();
10194
10195 // Set up git repository before creating the worktree.
10196 let work_dir = root.path().join("project");
10197 let repo = git_init(work_dir.as_path());
10198 repo.add_ignore_rule(IGNORE_RULE).unwrap();
10199 git_add("src/main.rs", &repo);
10200 git_add(".gitignore", &repo);
10201 git_commit("Initial commit", &repo);
10202
10203 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10204 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10205 let project_events = Arc::new(Mutex::new(Vec::new()));
10206 project.update(cx, |project, cx| {
10207 let repo_events = repository_updates.clone();
10208 cx.subscribe(project.git_store(), move |_, _, e, _| {
10209 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10210 repo_events.lock().push(e.clone());
10211 }
10212 })
10213 .detach();
10214 let project_events = project_events.clone();
10215 cx.subscribe_self(move |_, e, _| {
10216 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10217 project_events.lock().extend(
10218 updates
10219 .iter()
10220 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10221 .filter(|(path, _)| path != "fs-event-sentinel"),
10222 );
10223 }
10224 })
10225 .detach();
10226 });
10227
10228 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10229 tree.flush_fs_events(cx).await;
10230 tree.update(cx, |tree, cx| {
10231 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
10232 })
10233 .await
10234 .unwrap();
10235 tree.update(cx, |tree, _| {
10236 assert_eq!(
10237 tree.entries(true, 0)
10238 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10239 .collect::<Vec<_>>(),
10240 vec![
10241 (rel_path(""), false),
10242 (rel_path("project/"), false),
10243 (rel_path("project/.gitignore"), false),
10244 (rel_path("project/src"), false),
10245 (rel_path("project/src/main.rs"), false),
10246 (rel_path("project/target"), true),
10247 (rel_path("project/target/debug"), true),
10248 (rel_path("project/target/debug/important_text.txt"), true),
10249 ]
10250 );
10251 });
10252
10253 assert_eq!(
10254 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10255 vec![
10256 RepositoryEvent::StatusesChanged,
10257 RepositoryEvent::MergeHeadsChanged,
10258 ],
10259 "Initial worktree scan should produce a repo update event"
10260 );
10261 assert_eq!(
10262 project_events.lock().drain(..).collect::<Vec<_>>(),
10263 vec![
10264 ("project/target".to_string(), PathChange::Loaded),
10265 ("project/target/debug".to_string(), PathChange::Loaded),
10266 (
10267 "project/target/debug/important_text.txt".to_string(),
10268 PathChange::Loaded
10269 ),
10270 ],
10271 "Initial project changes should show that all not-ignored and all opened files are loaded"
10272 );
10273
10274 let deps_dir = work_dir.join("target").join("debug").join("deps");
10275 std::fs::create_dir_all(&deps_dir).unwrap();
10276 tree.flush_fs_events(cx).await;
10277 project
10278 .update(cx, |project, cx| project.git_scans_complete(cx))
10279 .await;
10280 cx.executor().run_until_parked();
10281 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
10282 tree.flush_fs_events(cx).await;
10283 project
10284 .update(cx, |project, cx| project.git_scans_complete(cx))
10285 .await;
10286 cx.executor().run_until_parked();
10287 std::fs::remove_dir_all(&deps_dir).unwrap();
10288 tree.flush_fs_events(cx).await;
10289 project
10290 .update(cx, |project, cx| project.git_scans_complete(cx))
10291 .await;
10292 cx.executor().run_until_parked();
10293
10294 tree.update(cx, |tree, _| {
10295 assert_eq!(
10296 tree.entries(true, 0)
10297 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10298 .collect::<Vec<_>>(),
10299 vec![
10300 (rel_path(""), false),
10301 (rel_path("project/"), false),
10302 (rel_path("project/.gitignore"), false),
10303 (rel_path("project/src"), false),
10304 (rel_path("project/src/main.rs"), false),
10305 (rel_path("project/target"), true),
10306 (rel_path("project/target/debug"), true),
10307 (rel_path("project/target/debug/important_text.txt"), true),
10308 ],
10309 "No stray temp files should be left after the flycheck changes"
10310 );
10311 });
10312
10313 assert_eq!(
10314 repository_updates
10315 .lock()
10316 .iter()
10317 .cloned()
10318 .collect::<Vec<_>>(),
10319 Vec::new(),
10320 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
10321 );
10322 assert_eq!(
10323 project_events.lock().as_slice(),
10324 vec![
10325 ("project/target/debug/deps".to_string(), PathChange::Added),
10326 ("project/target/debug/deps".to_string(), PathChange::Removed),
10327 ],
10328 "Due to `debug` directory being tracked, it should get updates for entries inside it.
10329 No updates for more nested directories should happen as those are ignored",
10330 );
10331}
10332
10333// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10334// to different timings/ordering of events.
10335#[ignore]
10336#[gpui::test]
10337async fn test_odd_events_for_ignored_dirs(
10338 executor: BackgroundExecutor,
10339 cx: &mut gpui::TestAppContext,
10340) {
10341 init_test(cx);
10342 let fs = FakeFs::new(executor);
10343 fs.insert_tree(
10344 path!("/root"),
10345 json!({
10346 ".git": {},
10347 ".gitignore": "**/target/",
10348 "src": {
10349 "main.rs": "fn main() {}",
10350 },
10351 "target": {
10352 "debug": {
10353 "foo.txt": "foo",
10354 "deps": {}
10355 }
10356 }
10357 }),
10358 )
10359 .await;
10360 fs.set_head_and_index_for_repo(
10361 path!("/root/.git").as_ref(),
10362 &[
10363 (".gitignore", "**/target/".into()),
10364 ("src/main.rs", "fn main() {}".into()),
10365 ],
10366 );
10367
10368 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10369 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10370 let project_events = Arc::new(Mutex::new(Vec::new()));
10371 project.update(cx, |project, cx| {
10372 let repository_updates = repository_updates.clone();
10373 cx.subscribe(project.git_store(), move |_, _, e, _| {
10374 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10375 repository_updates.lock().push(e.clone());
10376 }
10377 })
10378 .detach();
10379 let project_events = project_events.clone();
10380 cx.subscribe_self(move |_, e, _| {
10381 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10382 project_events.lock().extend(
10383 updates
10384 .iter()
10385 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10386 .filter(|(path, _)| path != "fs-event-sentinel"),
10387 );
10388 }
10389 })
10390 .detach();
10391 });
10392
10393 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10394 tree.update(cx, |tree, cx| {
10395 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10396 })
10397 .await
10398 .unwrap();
10399 tree.flush_fs_events(cx).await;
10400 project
10401 .update(cx, |project, cx| project.git_scans_complete(cx))
10402 .await;
10403 cx.run_until_parked();
10404 tree.update(cx, |tree, _| {
10405 assert_eq!(
10406 tree.entries(true, 0)
10407 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10408 .collect::<Vec<_>>(),
10409 vec![
10410 (rel_path(""), false),
10411 (rel_path(".gitignore"), false),
10412 (rel_path("src"), false),
10413 (rel_path("src/main.rs"), false),
10414 (rel_path("target"), true),
10415 (rel_path("target/debug"), true),
10416 (rel_path("target/debug/deps"), true),
10417 (rel_path("target/debug/foo.txt"), true),
10418 ]
10419 );
10420 });
10421
10422 assert_eq!(
10423 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10424 vec![
10425 RepositoryEvent::MergeHeadsChanged,
10426 RepositoryEvent::BranchChanged,
10427 RepositoryEvent::StatusesChanged,
10428 RepositoryEvent::StatusesChanged,
10429 ],
10430 "Initial worktree scan should produce a repo update event"
10431 );
10432 assert_eq!(
10433 project_events.lock().drain(..).collect::<Vec<_>>(),
10434 vec![
10435 ("target".to_string(), PathChange::Loaded),
10436 ("target/debug".to_string(), PathChange::Loaded),
10437 ("target/debug/deps".to_string(), PathChange::Loaded),
10438 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10439 ],
10440 "All non-ignored entries and all opened firs should be getting a project event",
10441 );
10442
10443 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10444 // This may happen multiple times during a single flycheck, but once is enough for testing.
10445 fs.emit_fs_event("/root/target/debug/deps", None);
10446 tree.flush_fs_events(cx).await;
10447 project
10448 .update(cx, |project, cx| project.git_scans_complete(cx))
10449 .await;
10450 cx.executor().run_until_parked();
10451
10452 assert_eq!(
10453 repository_updates
10454 .lock()
10455 .iter()
10456 .cloned()
10457 .collect::<Vec<_>>(),
10458 Vec::new(),
10459 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10460 );
10461 assert_eq!(
10462 project_events.lock().as_slice(),
10463 Vec::new(),
10464 "No further project events should happen, as only ignored dirs received FS events",
10465 );
10466}
10467
// Verifies that repository discovery is limited to visible worktrees: adding
// an invisible (single-file) worktree that lives inside another repository
// must not cause that outer repository to be registered.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repositories: the outer one at /root/dir1 and the inner one
    // at /root/dir1/dep1.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only the inner repo's directory is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file inside the outer repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repository (/root/dir1) must still not be picked up.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10529
// Verifies git status and ignore-state tracking across a rescan: tracked
// files, files ignored by an ancestor .gitignore, and files inside an ignored
// directory, both for the initial scan and for files created afterwards.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so the `.git` entry itself shows up in the
    // worktree (asserted at the end of the test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The repo root is /root/tree; /root/.gitignore is an *ancestor* ignore
    // file that applies from outside the repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the contents of the ignored dir to be scanned so its entries can
    // be asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is unmodified (no status), and both kinds
    // of ignored files carry no status but the expected is_ignored flags.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new ancestor-ignored and
    // ignored-dir files, then let the rescan pick everything up.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged new file shows up as Added; the ignored ones get no status.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10670
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/...`) and submodules (`.git` file pointing at
// `.git/modules/...`) are each discovered as their own repository, and that
// git-state changes in them are reflected in buffer statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, and submodule — should
    // have been discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both say "b" while the file on disk says "B",
            // so the file should read as modified in the worktree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer /project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures the repository has processed pending updates before
        // we assert on its statuses.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10826
// Verifies that when two visible worktrees live inside the same repository,
// the project registers that repository only once.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees are subdirectories of the single /root/project repo.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository entry should exist, for the shared parent repo.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10873
// Verifies that when a buffer is saved under a new path, its unstaged and
// uncommitted diffs switch their base texts to the new path's staged and
// committed contents (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct marker strings so each diff base can be identified exactly.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Both files differ between HEAD and index so the two diff kinds have
    // different bases.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so saving it under the new path has real content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly opened uncommitted diff should likewise use file_2's
    // committed contents as its base.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10987
10988async fn search(
10989 project: &Entity<Project>,
10990 query: SearchQuery,
10991 cx: &mut gpui::TestAppContext,
10992) -> Result<HashMap<String, Vec<Range<usize>>>> {
10993 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10994 let mut results = HashMap::default();
10995 while let Ok(search_result) = search_rx.rx.recv().await {
10996 match search_result {
10997 SearchResult::Buffer { buffer, ranges } => {
10998 results.entry(buffer).or_insert(ranges);
10999 }
11000 SearchResult::LimitReached => {}
11001 }
11002 }
11003 Ok(results
11004 .into_iter()
11005 .map(|(buffer, ranges)| {
11006 buffer.update(cx, |buffer, cx| {
11007 let path = buffer
11008 .file()
11009 .unwrap()
11010 .full_path(cx)
11011 .to_string_lossy()
11012 .to_string();
11013 let ranges = ranges
11014 .into_iter()
11015 .map(|range| range.to_offset(buffer))
11016 .collect::<Vec<_>>();
11017 (path, ranges)
11018 })
11019 })
11020 .collect())
11021}
11022
11023pub fn init_test(cx: &mut gpui::TestAppContext) {
11024 zlog::init_test();
11025
11026 cx.update(|cx| {
11027 let settings_store = SettingsStore::test(cx);
11028 cx.set_global(settings_store);
11029 release_channel::init(semver::Version::new(0, 0, 0), cx);
11030 });
11031}
11032
11033fn json_lang() -> Arc<Language> {
11034 Arc::new(Language::new(
11035 LanguageConfig {
11036 name: "JSON".into(),
11037 matcher: LanguageMatcher {
11038 path_suffixes: vec!["json".to_string()],
11039 ..Default::default()
11040 },
11041 ..Default::default()
11042 },
11043 None,
11044 ))
11045}
11046
11047fn js_lang() -> Arc<Language> {
11048 Arc::new(Language::new(
11049 LanguageConfig {
11050 name: "JavaScript".into(),
11051 matcher: LanguageMatcher {
11052 path_suffixes: vec!["js".to_string()],
11053 ..Default::default()
11054 },
11055 ..Default::default()
11056 },
11057 None,
11058 ))
11059}
11060
/// A test Python language whose toolchain lister reports a "Python Venv"
/// toolchain for every `.venv` directory found in the subroot's ancestor
/// directories (checked against the provided fake filesystem).
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Walk the subroot path's ancestors and report a toolchain for
            // every `.venv` directory that exists on the fake filesystem.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented; tests only exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for these tests.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11130
11131fn typescript_lang() -> Arc<Language> {
11132 Arc::new(Language::new(
11133 LanguageConfig {
11134 name: "TypeScript".into(),
11135 matcher: LanguageMatcher {
11136 path_suffixes: vec!["ts".to_string()],
11137 ..Default::default()
11138 },
11139 ..Default::default()
11140 },
11141 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11142 ))
11143}
11144
11145fn tsx_lang() -> Arc<Language> {
11146 Arc::new(Language::new(
11147 LanguageConfig {
11148 name: "tsx".into(),
11149 matcher: LanguageMatcher {
11150 path_suffixes: vec!["tsx".to_string()],
11151 ..Default::default()
11152 },
11153 ..Default::default()
11154 },
11155 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11156 ))
11157}
11158
11159fn get_all_tasks(
11160 project: &Entity<Project>,
11161 task_contexts: Arc<TaskContexts>,
11162 cx: &mut App,
11163) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11164 let new_tasks = project.update(cx, |project, cx| {
11165 project.task_store.update(cx, |task_store, cx| {
11166 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11167 this.used_and_current_resolved_tasks(task_contexts, cx)
11168 })
11169 })
11170 });
11171
11172 cx.background_spawn(async move {
11173 let (mut old, new) = new_tasks.await;
11174 old.extend(new);
11175 old
11176 })
11177}
11178
11179#[track_caller]
11180fn assert_entry_git_state(
11181 tree: &Worktree,
11182 repository: &Repository,
11183 path: &str,
11184 index_status: Option<StatusCode>,
11185 is_ignored: bool,
11186) {
11187 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11188 let entry = tree
11189 .entry_for_path(&rel_path(path))
11190 .unwrap_or_else(|| panic!("entry {path} not found"));
11191 let status = repository
11192 .status_for_path(&repo_path(path))
11193 .map(|entry| entry.status);
11194 let expected = index_status.map(|index_status| {
11195 TrackedStatus {
11196 index_status,
11197 worktree_status: StatusCode::Unmodified,
11198 }
11199 .into()
11200 });
11201 assert_eq!(
11202 status, expected,
11203 "expected {path} to have git status: {expected:?}"
11204 );
11205 assert_eq!(
11206 entry.is_ignored, is_ignored,
11207 "expected {path} to have is_ignored: {is_ignored}"
11208 );
11209}
11210
11211#[track_caller]
11212fn git_init(path: &Path) -> git2::Repository {
11213 let mut init_opts = RepositoryInitOptions::new();
11214 init_opts.initial_head("main");
11215 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11216}
11217
11218#[track_caller]
11219fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11220 let path = path.as_ref();
11221 let mut index = repo.index().expect("Failed to get index");
11222 index.add_path(path).expect("Failed to add file");
11223 index.write().expect("Failed to write index");
11224}
11225
11226#[track_caller]
11227fn git_remove_index(path: &Path, repo: &git2::Repository) {
11228 let mut index = repo.index().expect("Failed to get index");
11229 index.remove_path(path).expect("Failed to add file");
11230 index.write().expect("Failed to write index");
11231}
11232
/// Commits the current index contents with the given message, using a fixed
/// test author/committer signature. Produces a parentless initial commit when
/// the repository has no HEAD yet; otherwise commits on top of HEAD.
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    // Snapshot the index into a tree object for the commit.
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        // No HEAD yet: this is the repository's first commit.
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}
11259
/// Cherry-picks `commit` onto the current HEAD.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11265
11266#[track_caller]
11267fn git_stash(repo: &mut git2::Repository) {
11268 use git2::Signature;
11269
11270 let signature = Signature::now("test", "test@zed.dev").unwrap();
11271 repo.stash_save(&signature, "N/A", None)
11272 .expect("Failed to stash");
11273}
11274
11275#[track_caller]
11276fn git_reset(offset: usize, repo: &git2::Repository) {
11277 let head = repo.head().expect("Couldn't get repo head");
11278 let object = head.peel(git2::ObjectType::Commit).unwrap();
11279 let commit = object.as_commit().unwrap();
11280 let new_head = commit
11281 .parents()
11282 .inspect(|parnet| {
11283 parnet.message();
11284 })
11285 .nth(offset)
11286 .expect("Not enough history");
11287 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11288 .expect("Could not reset");
11289}
11290
/// Creates a branch named `name` pointing at the current HEAD commit.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously read "Failed to commit" — this creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11301
/// Points HEAD at the given reference name and checks out its tree.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11308
/// Collects the repository's statuses into a path -> status map.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
11318
// Verifies `Project::find_project_path` with absolute paths across multiple
// worktrees: existing files, nested files, files that don't exist yet but
// fall inside a worktree, and paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as two separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file: the relative path preserves the subdirectory.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Files in the second worktree resolve to its id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if the file doesn't exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11402
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies that removing worktrees drops the git repositories that are no
    // longer reachable, and that the "active repository" falls back to one of
    // the remaining repos — becoming None once all repos are gone.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two repositories: /root/a and /root/b. /root/b/script lives *inside*
    // repo b, but is also opened as its own (third) worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    // Wait for initial git scans so repository state is settled before asserting.
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so removals below can target them.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must NOT drop repo b: the /root/b
    // worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing /root/a drops repo a; the active repository falls back to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repo-bearing worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11515
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies that staging a file optimistically marks its diff hunks as
    // pending-removal from the secondary (unstaged) diff before the git
    // operation completes, and that the states settle correctly afterwards.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD/index contain "two"; the working copy has "TWO", producing exactly
    // one modified hunk.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // Stage job hasn't been observed yet; keep ticking.
            HasSecondaryHunk => {}
            // Optimistic pending state reached; stop ticking.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The hunk must now be in the optimistic pending-removal state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11660
11661#[gpui::test]
11662async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11663 init_test(cx);
11664
11665 // Configure read_only_files setting
11666 cx.update(|cx| {
11667 cx.update_global::<SettingsStore, _>(|store, cx| {
11668 store.update_user_settings(cx, |settings| {
11669 settings.project.worktree.read_only_files = Some(vec![
11670 "**/generated/**".to_string(),
11671 "**/*.gen.rs".to_string(),
11672 ]);
11673 });
11674 });
11675 });
11676
11677 let fs = FakeFs::new(cx.background_executor.clone());
11678 fs.insert_tree(
11679 path!("/root"),
11680 json!({
11681 "src": {
11682 "main.rs": "fn main() {}",
11683 "types.gen.rs": "// Generated file",
11684 },
11685 "generated": {
11686 "schema.rs": "// Auto-generated schema",
11687 }
11688 }),
11689 )
11690 .await;
11691
11692 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11693
11694 // Open a regular file - should be read-write
11695 let regular_buffer = project
11696 .update(cx, |project, cx| {
11697 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11698 })
11699 .await
11700 .unwrap();
11701
11702 regular_buffer.read_with(cx, |buffer, _| {
11703 assert!(!buffer.read_only(), "Regular file should not be read-only");
11704 });
11705
11706 // Open a file matching *.gen.rs pattern - should be read-only
11707 let gen_buffer = project
11708 .update(cx, |project, cx| {
11709 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11710 })
11711 .await
11712 .unwrap();
11713
11714 gen_buffer.read_with(cx, |buffer, _| {
11715 assert!(
11716 buffer.read_only(),
11717 "File matching *.gen.rs pattern should be read-only"
11718 );
11719 });
11720
11721 // Open a file in generated directory - should be read-only
11722 let generated_buffer = project
11723 .update(cx, |project, cx| {
11724 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11725 })
11726 .await
11727 .unwrap();
11728
11729 generated_buffer.read_with(cx, |buffer, _| {
11730 assert!(
11731 buffer.read_only(),
11732 "File in generated directory should be read-only"
11733 );
11734 });
11735}
11736
11737#[gpui::test]
11738async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11739 init_test(cx);
11740
11741 // Explicitly set read_only_files to empty (default behavior)
11742 cx.update(|cx| {
11743 cx.update_global::<SettingsStore, _>(|store, cx| {
11744 store.update_user_settings(cx, |settings| {
11745 settings.project.worktree.read_only_files = Some(vec![]);
11746 });
11747 });
11748 });
11749
11750 let fs = FakeFs::new(cx.background_executor.clone());
11751 fs.insert_tree(
11752 path!("/root"),
11753 json!({
11754 "src": {
11755 "main.rs": "fn main() {}",
11756 },
11757 "generated": {
11758 "schema.rs": "// Auto-generated schema",
11759 }
11760 }),
11761 )
11762 .await;
11763
11764 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11765
11766 // All files should be read-write when read_only_files is empty
11767 let main_buffer = project
11768 .update(cx, |project, cx| {
11769 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11770 })
11771 .await
11772 .unwrap();
11773
11774 main_buffer.read_with(cx, |buffer, _| {
11775 assert!(
11776 !buffer.read_only(),
11777 "Files should not be read-only when read_only_files is empty"
11778 );
11779 });
11780
11781 let generated_buffer = project
11782 .update(cx, |project, cx| {
11783 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11784 })
11785 .await
11786 .unwrap();
11787
11788 generated_buffer.read_with(cx, |buffer, _| {
11789 assert!(
11790 !buffer.read_only(),
11791 "Generated files should not be read-only when read_only_files is empty"
11792 );
11793 });
11794}
11795
11796#[gpui::test]
11797async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11798 init_test(cx);
11799
11800 // Configure to make lock files read-only
11801 cx.update(|cx| {
11802 cx.update_global::<SettingsStore, _>(|store, cx| {
11803 store.update_user_settings(cx, |settings| {
11804 settings.project.worktree.read_only_files = Some(vec![
11805 "**/*.lock".to_string(),
11806 "**/package-lock.json".to_string(),
11807 ]);
11808 });
11809 });
11810 });
11811
11812 let fs = FakeFs::new(cx.background_executor.clone());
11813 fs.insert_tree(
11814 path!("/root"),
11815 json!({
11816 "Cargo.lock": "# Lock file",
11817 "Cargo.toml": "[package]",
11818 "package-lock.json": "{}",
11819 "package.json": "{}",
11820 }),
11821 )
11822 .await;
11823
11824 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11825
11826 // Cargo.lock should be read-only
11827 let cargo_lock = project
11828 .update(cx, |project, cx| {
11829 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11830 })
11831 .await
11832 .unwrap();
11833
11834 cargo_lock.read_with(cx, |buffer, _| {
11835 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11836 });
11837
11838 // Cargo.toml should be read-write
11839 let cargo_toml = project
11840 .update(cx, |project, cx| {
11841 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11842 })
11843 .await
11844 .unwrap();
11845
11846 cargo_toml.read_with(cx, |buffer, _| {
11847 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11848 });
11849
11850 // package-lock.json should be read-only
11851 let package_lock = project
11852 .update(cx, |project, cx| {
11853 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11854 })
11855 .await
11856 .unwrap();
11857
11858 package_lock.read_with(cx, |buffer, _| {
11859 assert!(buffer.read_only(), "package-lock.json should be read-only");
11860 });
11861
11862 // package.json should be read-write
11863 let package_json = project
11864 .update(cx, |project, cx| {
11865 project.open_local_buffer(path!("/root/package.json"), cx)
11866 })
11867 .await
11868 .unwrap();
11869
11870 package_json.read_with(cx, |buffer, _| {
11871 assert!(!buffer.read_only(), "package.json should not be read-only");
11872 });
11873}