1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
13 assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 markdown_lang, rust_lang, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so the test executor must
    // be allowed to park while blocking I/O completes.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink to the worktree root itself, and one inside the tree
    // aliasing "root/fennel" as "root/finnochio".
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the root symlink rather than the real path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, fennel/grape, plus grape seen again through the
        // "finnochio" symlinked directory = 5 file entries.
        assert_eq!(tree.file_count(), 5);
        // The same underlying file must be reachable through both paths and
        // report the identical inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig (with rust- and js-specific sections),
    // project settings under .zed/, and a nested b/.editorconfig that
    // overrides the root one for *.rs files.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        // NOTE(review): the trailing comma after "off" below looks accidental;
        // the test still expects the .zed fallback (64) for b.rs — confirm the
        // editorconfig parser treats "off," the same as "off".
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: two .editorconfig files live ABOVE the opened worktree
    // (grandparent and parent), plus one inside it. Each targets a different
    // glob so each sample file resolves against a different config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // Only the innermost directory is opened as a worktree.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
311
312#[gpui::test]
313async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
314 init_test(cx);
315
316 let fs = FakeFs::new(cx.executor());
317 fs.insert_tree(
318 path!("/parent"),
319 json!({
320 ".editorconfig": "[*]\nindent_size = 99\n",
321 "worktree": {
322 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
323 "file.rs": "fn main() {}",
324 }
325 }),
326 )
327 .await;
328
329 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
330
331 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
332 language_registry.add(rust_lang());
333
334 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
335
336 cx.executor().run_until_parked();
337
338 cx.update(|cx| {
339 let tree = worktree.read(cx);
340 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
341 let file = File::for_entry(file_entry, worktree.clone());
342 let file_language = project
343 .read(cx)
344 .languages()
345 .load_language_for_file_path(file.path.as_std_path());
346 let file_language = cx
347 .foreground_executor()
348 .block_on(file_language)
349 .expect("Failed to get file language");
350 let file = file as _;
351 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
352
353 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
354 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
355 });
356}
357
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree has no .editorconfig of its own; the parent's config
    // declares `root = true`, which should stop traversal before the
    // grandparent's config (indent_size = 99) is reached.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        //
        // NOTE(review): 4 is also the default tab_size (see the
        // "..._not_loaded_without_internal_config" test, which expects 4 when
        // no external config is loaded at all). With this worktree lacking an
        // internal .editorconfig, this assertion may be passing vacuously —
        // consider using a non-default value (e.g. 5) in the parent config to
        // make the test discriminating. TODO confirm intended behavior.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
405
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees share a single external .editorconfig in their
    // common parent. Each worktree also has an internal config (which, per the
    // "..._not_loaded_without_internal_config" test, is what triggers loading
    // the external one).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both siblings as worktrees in one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
467
468#[gpui::test]
469async fn test_external_editorconfig_not_loaded_without_internal_config(
470 cx: &mut gpui::TestAppContext,
471) {
472 init_test(cx);
473
474 let fs = FakeFs::new(cx.executor());
475 fs.insert_tree(
476 path!("/parent"),
477 json!({
478 ".editorconfig": "[*]\nindent_size = 99\n",
479 "worktree": {
480 "file.rs": "fn main() {}",
481 }
482 }),
483 )
484 .await;
485
486 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
487
488 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
489 language_registry.add(rust_lang());
490
491 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
492
493 cx.executor().run_until_parked();
494
495 cx.update(|cx| {
496 let tree = worktree.read(cx);
497 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
498 let file = File::for_entry(file_entry, worktree.clone());
499 let file_language = project
500 .read(cx)
501 .languages()
502 .load_language_for_file_path(file.path.as_std_path());
503 let file_language = cx
504 .foreground_executor()
505 .block_on(file_language)
506 .expect("Failed to get file language");
507 let file = file as _;
508 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
509
510 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
511 // because without an internal .editorconfig, external configs are not loaded
512 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
513 });
514}
515
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The external config lives in /parent, OUTSIDE the opened worktree; the
    // worktree's own (empty-section) .editorconfig triggers its discovery.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    // Keep a handle to the fs so we can mutate the external config later.
    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the EXTERNAL config on disk; the watcher should pick this up
    // and refresh the resolved settings.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
589
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling directories under a parent that holds the external root
    // config. Only one is opened initially; the second is added later and
    // must also pick up the parent's settings.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second directory as a new worktree at runtime.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
666
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internals via its test-only accessor:
    // registered worktree ids, loaded external config paths, and file-watcher
    // paths.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
722
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two worktrees share one external .editorconfig; removing one worktree
    // must NOT drop the shared config while the other still references it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // NOTE(review): assumes `worktrees(cx)` yields the two trees in the order
    // they were passed to Project::test — confirm the iteration order is
    // guaranteed.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
820
821#[gpui::test]
822async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
823 init_test(cx);
824 cx.update(|cx| {
825 GitHostingProviderRegistry::default_global(cx);
826 git_hosting_providers::init(cx);
827 });
828
829 let fs = FakeFs::new(cx.executor());
830 let str_path = path!("/dir");
831 let path = Path::new(str_path);
832
833 fs.insert_tree(
834 path!("/dir"),
835 json!({
836 ".zed": {
837 "settings.json": r#"{
838 "git_hosting_providers": [
839 {
840 "provider": "gitlab",
841 "base_url": "https://google.com",
842 "name": "foo"
843 }
844 ]
845 }"#
846 },
847 }),
848 )
849 .await;
850
851 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
852 let (_worktree, _) =
853 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
854 cx.executor().run_until_parked();
855
856 cx.update(|cx| {
857 let provider = GitHostingProviderRegistry::global(cx);
858 assert!(
859 provider
860 .list_hosting_providers()
861 .into_iter()
862 .any(|provider| provider.name() == "foo")
863 );
864 });
865
866 fs.atomic_write(
867 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
868 "{}".into(),
869 )
870 .await
871 .unwrap();
872
873 cx.run_until_parked();
874
875 cx.update(|cx| {
876 let provider = GitHostingProviderRegistry::global(cx);
877 assert!(
878 !provider
879 .list_hosting_providers()
880 .into_iter()
881 .any(|provider| provider.name() == "foo")
882 );
883 });
884}
885
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two settings/tasks scopes inside one worktree: the root ".zed" directory
    // and a nested "b/.zed". Each contributes its own tab_size and its own
    // task definition.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task context tied to the active worktree; task resolution happens
    // against this context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identity of the worktree-root ".zed" task source, used later to find and
    // re-schedule its task.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-directory: tab_size 8 at the root scope,
            // 2 inside b/.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        // Flatten each resolved task into (source, label, args, env) so the
        // assertions below can compare plain data.
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both directory-level task sources appear; neither task has run yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root ".zed" task as scheduled (so it sorts as most recent) and
    // add a global tasks.json with a third task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled root task now sorts first, followed by the b/.zed task and
    // the newly added global task (with its env var resolved).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1086
1087#[gpui::test]
1088async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1089 init_test(cx);
1090 TaskStore::init(None);
1091
1092 let fs = FakeFs::new(cx.executor());
1093 fs.insert_tree(
1094 path!("/dir"),
1095 json!({
1096 ".zed": {
1097 "tasks.json": r#"[{
1098 "label": "test worktree root",
1099 "command": "echo $ZED_WORKTREE_ROOT"
1100 }]"#,
1101 },
1102 "a": {
1103 "a.rs": "fn a() {\n A\n}"
1104 },
1105 }),
1106 )
1107 .await;
1108
1109 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1110 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1111
1112 cx.executor().run_until_parked();
1113 let worktree_id = cx.update(|cx| {
1114 project.update(cx, |project, cx| {
1115 project.worktrees(cx).next().unwrap().read(cx).id()
1116 })
1117 });
1118
1119 let active_non_worktree_item_tasks = cx
1120 .update(|cx| {
1121 get_all_tasks(
1122 &project,
1123 Arc::new(TaskContexts {
1124 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1125 active_worktree_context: None,
1126 other_worktree_contexts: Vec::new(),
1127 lsp_task_sources: HashMap::default(),
1128 latest_selection: None,
1129 }),
1130 cx,
1131 )
1132 })
1133 .await;
1134 assert!(
1135 active_non_worktree_item_tasks.is_empty(),
1136 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1137 );
1138
1139 let active_worktree_tasks = cx
1140 .update(|cx| {
1141 get_all_tasks(
1142 &project,
1143 Arc::new(TaskContexts {
1144 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1145 active_worktree_context: Some((worktree_id, {
1146 let mut worktree_context = TaskContext::default();
1147 worktree_context
1148 .task_variables
1149 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1150 worktree_context
1151 })),
1152 other_worktree_contexts: Vec::new(),
1153 lsp_task_sources: HashMap::default(),
1154 latest_selection: None,
1155 }),
1156 cx,
1157 )
1158 })
1159 .await;
1160 assert_eq!(
1161 active_worktree_tasks
1162 .into_iter()
1163 .map(|(source_kind, task)| {
1164 let resolved = task.resolved;
1165 (source_kind, resolved.command.unwrap())
1166 })
1167 .collect::<Vec<_>>(),
1168 vec![(
1169 TaskSourceKind::Worktree {
1170 id: worktree_id,
1171 directory_in_worktree: rel_path(".zed").into(),
1172 id_base: "local worktree tasks from directory \".zed\"".into(),
1173 },
1174 "echo /dir".to_string(),
1175 )]
1176 );
1177}
1178
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that two subprojects in one worktree initially share a single
    // language server instance, and that activating a distinct toolchain for
    // one subproject causes a second instance of the same server to start.

    // Minimal manifest provider: a Python project root is the nearest ancestor
    // directory (within `depth` levels) that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two subprojects, each with its own pyproject.toml and .venv directory,
    // sharing a single worktree root configured to use the "ty" server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer in project-a starts the first "ty" instance.
    language_registry.add(python_lang(fs.clone()));
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance, since no
    // toolchain distinguishes the two subprojects yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Toolchain discovery is rooted at project-b (its pyproject.toml), thanks
    // to the manifest provider registered above.
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1380
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // Exercises the language-server lifecycle against open buffers: startup on
    // the first relevant buffer, capability-driven buffer configuration,
    // change/save notifications, didClose/didOpen on renames (including a
    // rename that switches language and hence server), diagnostics clearing on
    // language change, version reset, server restart, and didClose on drop.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so we can tell from
    // a buffer's configuration which server configured it.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language (and server) changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements spawn.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two didOpen notifications is not guaranteed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1783
1784#[gpui::test]
1785async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1786 init_test(cx);
1787
1788 let settings_json_contents = json!({
1789 "languages": {
1790 "Rust": {
1791 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1792 }
1793 },
1794 "lsp": {
1795 "my_fake_lsp": {
1796 "binary": {
1797 // file exists, so this is treated as a relative path
1798 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1799 }
1800 },
1801 "lsp_on_path": {
1802 "binary": {
1803 // file doesn't exist, so it will fall back on PATH env var
1804 "path": path!("lsp_on_path.exe").to_string(),
1805 }
1806 }
1807 },
1808 });
1809
1810 let fs = FakeFs::new(cx.executor());
1811 fs.insert_tree(
1812 path!("/the-root"),
1813 json!({
1814 ".zed": {
1815 "settings.json": settings_json_contents.to_string(),
1816 },
1817 ".relative_path": {
1818 "to": {
1819 "my_fake_lsp.exe": "",
1820 },
1821 },
1822 "src": {
1823 "main.rs": "",
1824 }
1825 }),
1826 )
1827 .await;
1828
1829 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1830 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1831 language_registry.add(rust_lang());
1832
1833 let mut my_fake_lsp = language_registry.register_fake_lsp(
1834 "Rust",
1835 FakeLspAdapter {
1836 name: "my_fake_lsp",
1837 ..Default::default()
1838 },
1839 );
1840 let mut lsp_on_path = language_registry.register_fake_lsp(
1841 "Rust",
1842 FakeLspAdapter {
1843 name: "lsp_on_path",
1844 ..Default::default()
1845 },
1846 );
1847
1848 cx.run_until_parked();
1849
1850 // Start the language server by opening a buffer with a compatible file extension.
1851 project
1852 .update(cx, |project, cx| {
1853 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1854 })
1855 .await
1856 .unwrap();
1857
1858 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1859 assert_eq!(
1860 lsp_path.to_string_lossy(),
1861 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1862 );
1863
1864 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1865 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1866}
1867
1868#[gpui::test]
1869async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1870 init_test(cx);
1871
1872 let settings_json_contents = json!({
1873 "languages": {
1874 "Rust": {
1875 "language_servers": ["tilde_lsp"]
1876 }
1877 },
1878 "lsp": {
1879 "tilde_lsp": {
1880 "binary": {
1881 "path": "~/.local/bin/rust-analyzer",
1882 }
1883 }
1884 },
1885 });
1886
1887 let fs = FakeFs::new(cx.executor());
1888 fs.insert_tree(
1889 path!("/root"),
1890 json!({
1891 ".zed": {
1892 "settings.json": settings_json_contents.to_string(),
1893 },
1894 "src": {
1895 "main.rs": "fn main() {}",
1896 }
1897 }),
1898 )
1899 .await;
1900
1901 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1902 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1903 language_registry.add(rust_lang());
1904
1905 let mut tilde_lsp = language_registry.register_fake_lsp(
1906 "Rust",
1907 FakeLspAdapter {
1908 name: "tilde_lsp",
1909 ..Default::default()
1910 },
1911 );
1912 cx.run_until_parked();
1913
1914 project
1915 .update(cx, |project, cx| {
1916 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1917 })
1918 .await
1919 .unwrap();
1920
1921 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1922 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1923 assert_eq!(
1924 lsp_path, expected_path,
1925 "Tilde path should expand to home directory"
1926 );
1927}
1928
1929#[gpui::test]
1930async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1931 init_test(cx);
1932
1933 let fs = FakeFs::new(cx.executor());
1934 fs.insert_tree(
1935 path!("/the-root"),
1936 json!({
1937 ".gitignore": "target\n",
1938 "Cargo.lock": "",
1939 "src": {
1940 "a.rs": "",
1941 "b.rs": "",
1942 },
1943 "target": {
1944 "x": {
1945 "out": {
1946 "x.rs": ""
1947 }
1948 },
1949 "y": {
1950 "out": {
1951 "y.rs": "",
1952 }
1953 },
1954 "z": {
1955 "out": {
1956 "z.rs": ""
1957 }
1958 }
1959 }
1960 }),
1961 )
1962 .await;
1963 fs.insert_tree(
1964 path!("/the-registry"),
1965 json!({
1966 "dep1": {
1967 "src": {
1968 "dep1.rs": "",
1969 }
1970 },
1971 "dep2": {
1972 "src": {
1973 "dep2.rs": "",
1974 }
1975 },
1976 }),
1977 )
1978 .await;
1979 fs.insert_tree(
1980 path!("/the/stdlib"),
1981 json!({
1982 "LICENSE": "",
1983 "src": {
1984 "string.rs": "",
1985 }
1986 }),
1987 )
1988 .await;
1989
1990 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1991 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1992 (project.languages().clone(), project.lsp_store())
1993 });
1994 language_registry.add(rust_lang());
1995 let mut fake_servers = language_registry.register_fake_lsp(
1996 "Rust",
1997 FakeLspAdapter {
1998 name: "the-language-server",
1999 ..Default::default()
2000 },
2001 );
2002
2003 cx.executor().run_until_parked();
2004
2005 // Start the language server by opening a buffer with a compatible file extension.
2006 project
2007 .update(cx, |project, cx| {
2008 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2009 })
2010 .await
2011 .unwrap();
2012
2013 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2014 project.update(cx, |project, cx| {
2015 let worktree = project.worktrees(cx).next().unwrap();
2016 assert_eq!(
2017 worktree
2018 .read(cx)
2019 .snapshot()
2020 .entries(true, 0)
2021 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2022 .collect::<Vec<_>>(),
2023 &[
2024 ("", false),
2025 (".gitignore", false),
2026 ("Cargo.lock", false),
2027 ("src", false),
2028 ("src/a.rs", false),
2029 ("src/b.rs", false),
2030 ("target", true),
2031 ]
2032 );
2033 });
2034
2035 let prev_read_dir_count = fs.read_dir_call_count();
2036
2037 let fake_server = fake_servers.next().await.unwrap();
2038 cx.executor().run_until_parked();
2039 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2040 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2041 id
2042 });
2043
2044 // Simulate jumping to a definition in a dependency outside of the worktree.
2045 let _out_of_worktree_buffer = project
2046 .update(cx, |project, cx| {
2047 project.open_local_buffer_via_lsp(
2048 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2049 server_id,
2050 cx,
2051 )
2052 })
2053 .await
2054 .unwrap();
2055
2056 // Keep track of the FS events reported to the language server.
2057 let file_changes = Arc::new(Mutex::new(Vec::new()));
2058 fake_server
2059 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2060 registrations: vec![lsp::Registration {
2061 id: Default::default(),
2062 method: "workspace/didChangeWatchedFiles".to_string(),
2063 register_options: serde_json::to_value(
2064 lsp::DidChangeWatchedFilesRegistrationOptions {
2065 watchers: vec![
2066 lsp::FileSystemWatcher {
2067 glob_pattern: lsp::GlobPattern::String(
2068 path!("/the-root/Cargo.toml").to_string(),
2069 ),
2070 kind: None,
2071 },
2072 lsp::FileSystemWatcher {
2073 glob_pattern: lsp::GlobPattern::String(
2074 path!("/the-root/src/*.{rs,c}").to_string(),
2075 ),
2076 kind: None,
2077 },
2078 lsp::FileSystemWatcher {
2079 glob_pattern: lsp::GlobPattern::String(
2080 path!("/the-root/target/y/**/*.rs").to_string(),
2081 ),
2082 kind: None,
2083 },
2084 lsp::FileSystemWatcher {
2085 glob_pattern: lsp::GlobPattern::String(
2086 path!("/the/stdlib/src/**/*.rs").to_string(),
2087 ),
2088 kind: None,
2089 },
2090 lsp::FileSystemWatcher {
2091 glob_pattern: lsp::GlobPattern::String(
2092 path!("**/Cargo.lock").to_string(),
2093 ),
2094 kind: None,
2095 },
2096 ],
2097 },
2098 )
2099 .ok(),
2100 }],
2101 })
2102 .await
2103 .into_response()
2104 .unwrap();
2105 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2106 let file_changes = file_changes.clone();
2107 move |params, _| {
2108 let mut file_changes = file_changes.lock();
2109 file_changes.extend(params.changes);
2110 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2111 }
2112 });
2113
2114 cx.executor().run_until_parked();
2115 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2116 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2117
2118 let mut new_watched_paths = fs.watched_paths();
2119 new_watched_paths.retain(|path| {
2120 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2121 });
2122 assert_eq!(
2123 &new_watched_paths,
2124 &[
2125 Path::new(path!("/the-root")),
2126 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2127 Path::new(path!("/the/stdlib/src"))
2128 ]
2129 );
2130
2131 // Now the language server has asked us to watch an ignored directory path,
2132 // so we recursively load it.
2133 project.update(cx, |project, cx| {
2134 let worktree = project.visible_worktrees(cx).next().unwrap();
2135 assert_eq!(
2136 worktree
2137 .read(cx)
2138 .snapshot()
2139 .entries(true, 0)
2140 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2141 .collect::<Vec<_>>(),
2142 &[
2143 ("", false),
2144 (".gitignore", false),
2145 ("Cargo.lock", false),
2146 ("src", false),
2147 ("src/a.rs", false),
2148 ("src/b.rs", false),
2149 ("target", true),
2150 ("target/x", true),
2151 ("target/y", true),
2152 ("target/y/out", true),
2153 ("target/y/out/y.rs", true),
2154 ("target/z", true),
2155 ]
2156 );
2157 });
2158
2159 // Perform some file system mutations, two of which match the watched patterns,
2160 // and one of which does not.
2161 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2162 .await
2163 .unwrap();
2164 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2165 .await
2166 .unwrap();
2167 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2168 .await
2169 .unwrap();
2170 fs.create_file(
2171 path!("/the-root/target/x/out/x2.rs").as_ref(),
2172 Default::default(),
2173 )
2174 .await
2175 .unwrap();
2176 fs.create_file(
2177 path!("/the-root/target/y/out/y2.rs").as_ref(),
2178 Default::default(),
2179 )
2180 .await
2181 .unwrap();
2182 fs.save(
2183 path!("/the-root/Cargo.lock").as_ref(),
2184 &"".into(),
2185 Default::default(),
2186 )
2187 .await
2188 .unwrap();
2189 fs.save(
2190 path!("/the-stdlib/LICENSE").as_ref(),
2191 &"".into(),
2192 Default::default(),
2193 )
2194 .await
2195 .unwrap();
2196 fs.save(
2197 path!("/the/stdlib/src/string.rs").as_ref(),
2198 &"".into(),
2199 Default::default(),
2200 )
2201 .await
2202 .unwrap();
2203
2204 // The language server receives events for the FS mutations that match its watch patterns.
2205 cx.executor().run_until_parked();
2206 assert_eq!(
2207 &*file_changes.lock(),
2208 &[
2209 lsp::FileEvent {
2210 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2211 typ: lsp::FileChangeType::CHANGED,
2212 },
2213 lsp::FileEvent {
2214 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2215 typ: lsp::FileChangeType::DELETED,
2216 },
2217 lsp::FileEvent {
2218 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2219 typ: lsp::FileChangeType::CREATED,
2220 },
2221 lsp::FileEvent {
2222 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2223 typ: lsp::FileChangeType::CREATED,
2224 },
2225 lsp::FileEvent {
2226 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2227 typ: lsp::FileChangeType::CHANGED,
2228 },
2229 ]
2230 );
2231}
2232
2233#[gpui::test]
2234async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2235 init_test(cx);
2236
2237 let fs = FakeFs::new(cx.executor());
2238 fs.insert_tree(
2239 path!("/dir"),
2240 json!({
2241 "a.rs": "let a = 1;",
2242 "b.rs": "let b = 2;"
2243 }),
2244 )
2245 .await;
2246
2247 let project = Project::test(
2248 fs,
2249 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2250 cx,
2251 )
2252 .await;
2253 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2254
2255 let buffer_a = project
2256 .update(cx, |project, cx| {
2257 project.open_local_buffer(path!("/dir/a.rs"), cx)
2258 })
2259 .await
2260 .unwrap();
2261 let buffer_b = project
2262 .update(cx, |project, cx| {
2263 project.open_local_buffer(path!("/dir/b.rs"), cx)
2264 })
2265 .await
2266 .unwrap();
2267
2268 lsp_store.update(cx, |lsp_store, cx| {
2269 lsp_store
2270 .update_diagnostics(
2271 LanguageServerId(0),
2272 lsp::PublishDiagnosticsParams {
2273 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2274 version: None,
2275 diagnostics: vec![lsp::Diagnostic {
2276 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2277 severity: Some(lsp::DiagnosticSeverity::ERROR),
2278 message: "error 1".to_string(),
2279 ..Default::default()
2280 }],
2281 },
2282 None,
2283 DiagnosticSourceKind::Pushed,
2284 &[],
2285 cx,
2286 )
2287 .unwrap();
2288 lsp_store
2289 .update_diagnostics(
2290 LanguageServerId(0),
2291 lsp::PublishDiagnosticsParams {
2292 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2293 version: None,
2294 diagnostics: vec![lsp::Diagnostic {
2295 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2296 severity: Some(DiagnosticSeverity::WARNING),
2297 message: "error 2".to_string(),
2298 ..Default::default()
2299 }],
2300 },
2301 None,
2302 DiagnosticSourceKind::Pushed,
2303 &[],
2304 cx,
2305 )
2306 .unwrap();
2307 });
2308
2309 buffer_a.update(cx, |buffer, _| {
2310 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2311 assert_eq!(
2312 chunks
2313 .iter()
2314 .map(|(s, d)| (s.as_str(), *d))
2315 .collect::<Vec<_>>(),
2316 &[
2317 ("let ", None),
2318 ("a", Some(DiagnosticSeverity::ERROR)),
2319 (" = 1;", None),
2320 ]
2321 );
2322 });
2323 buffer_b.update(cx, |buffer, _| {
2324 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2325 assert_eq!(
2326 chunks
2327 .iter()
2328 .map(|(s, d)| (s.as_str(), *d))
2329 .collect::<Vec<_>>(),
2330 &[
2331 ("let ", None),
2332 ("b", Some(DiagnosticSeverity::WARNING)),
2333 (" = 2;", None),
2334 ]
2335 );
2336 });
2337}
2338
2339#[gpui::test]
2340async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2341 init_test(cx);
2342
2343 let fs = FakeFs::new(cx.executor());
2344 fs.insert_tree(
2345 path!("/root"),
2346 json!({
2347 "dir": {
2348 ".git": {
2349 "HEAD": "ref: refs/heads/main",
2350 },
2351 ".gitignore": "b.rs",
2352 "a.rs": "let a = 1;",
2353 "b.rs": "let b = 2;",
2354 },
2355 "other.rs": "let b = c;"
2356 }),
2357 )
2358 .await;
2359
2360 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2361 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2362 let (worktree, _) = project
2363 .update(cx, |project, cx| {
2364 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2365 })
2366 .await
2367 .unwrap();
2368 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2369
2370 let (worktree, _) = project
2371 .update(cx, |project, cx| {
2372 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2373 })
2374 .await
2375 .unwrap();
2376 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2377
2378 let server_id = LanguageServerId(0);
2379 lsp_store.update(cx, |lsp_store, cx| {
2380 lsp_store
2381 .update_diagnostics(
2382 server_id,
2383 lsp::PublishDiagnosticsParams {
2384 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2385 version: None,
2386 diagnostics: vec![lsp::Diagnostic {
2387 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2388 severity: Some(lsp::DiagnosticSeverity::ERROR),
2389 message: "unused variable 'b'".to_string(),
2390 ..Default::default()
2391 }],
2392 },
2393 None,
2394 DiagnosticSourceKind::Pushed,
2395 &[],
2396 cx,
2397 )
2398 .unwrap();
2399 lsp_store
2400 .update_diagnostics(
2401 server_id,
2402 lsp::PublishDiagnosticsParams {
2403 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2404 version: None,
2405 diagnostics: vec![lsp::Diagnostic {
2406 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2407 severity: Some(lsp::DiagnosticSeverity::ERROR),
2408 message: "unknown variable 'c'".to_string(),
2409 ..Default::default()
2410 }],
2411 },
2412 None,
2413 DiagnosticSourceKind::Pushed,
2414 &[],
2415 cx,
2416 )
2417 .unwrap();
2418 });
2419
2420 let main_ignored_buffer = project
2421 .update(cx, |project, cx| {
2422 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2423 })
2424 .await
2425 .unwrap();
2426 main_ignored_buffer.update(cx, |buffer, _| {
2427 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2428 assert_eq!(
2429 chunks
2430 .iter()
2431 .map(|(s, d)| (s.as_str(), *d))
2432 .collect::<Vec<_>>(),
2433 &[
2434 ("let ", None),
2435 ("b", Some(DiagnosticSeverity::ERROR)),
2436 (" = 2;", None),
2437 ],
2438 "Gigitnored buffers should still get in-buffer diagnostics",
2439 );
2440 });
2441 let other_buffer = project
2442 .update(cx, |project, cx| {
2443 project.open_buffer((other_worktree_id, rel_path("")), cx)
2444 })
2445 .await
2446 .unwrap();
2447 other_buffer.update(cx, |buffer, _| {
2448 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2449 assert_eq!(
2450 chunks
2451 .iter()
2452 .map(|(s, d)| (s.as_str(), *d))
2453 .collect::<Vec<_>>(),
2454 &[
2455 ("let b = ", None),
2456 ("c", Some(DiagnosticSeverity::ERROR)),
2457 (";", None),
2458 ],
2459 "Buffers from hidden projects should still get in-buffer diagnostics"
2460 );
2461 });
2462
2463 project.update(cx, |project, cx| {
2464 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2465 assert_eq!(
2466 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2467 vec![(
2468 ProjectPath {
2469 worktree_id: main_worktree_id,
2470 path: rel_path("b.rs").into(),
2471 },
2472 server_id,
2473 DiagnosticSummary {
2474 error_count: 1,
2475 warning_count: 0,
2476 }
2477 )]
2478 );
2479 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2480 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2481 });
2482}
2483
// Exercises the disk-based-diagnostics progress lifecycle as observed through
// project events: starting the server's progress token emits
// `DiskBasedDiagnosticsStarted`, publishing diagnostics emits
// `DiagnosticsUpdated`, ending the token emits `DiskBasedDiagnosticsFinished`,
// and re-publishing identical empty diagnostics yields no duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting the configured disk-based progress token surfaces a
    // "diagnostics started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while the token is active emits an update event
    // carrying the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the token surfaces a "diagnostics finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is retrievable from the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second identical (still-empty) publish: no further event should arrive.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2619
// Restarts a language server while its disk-based diagnostics progress is
// still running, and verifies the replacement server's progress is tracked
// under the new server id — the old server's never-finished progress task
// must not leave diagnostics stuck in the "running" state.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2721
2722#[gpui::test]
2723async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2724 init_test(cx);
2725
2726 let fs = FakeFs::new(cx.executor());
2727 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2728
2729 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2730
2731 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2732 language_registry.add(rust_lang());
2733 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2734
2735 let (buffer, _) = project
2736 .update(cx, |project, cx| {
2737 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2738 })
2739 .await
2740 .unwrap();
2741
2742 // Publish diagnostics
2743 let fake_server = fake_servers.next().await.unwrap();
2744 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2745 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2746 version: None,
2747 diagnostics: vec![lsp::Diagnostic {
2748 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2749 severity: Some(lsp::DiagnosticSeverity::ERROR),
2750 message: "the message".to_string(),
2751 ..Default::default()
2752 }],
2753 });
2754
2755 cx.executor().run_until_parked();
2756 buffer.update(cx, |buffer, _| {
2757 assert_eq!(
2758 buffer
2759 .snapshot()
2760 .diagnostics_in_range::<_, usize>(0..1, false)
2761 .map(|entry| entry.diagnostic.message.clone())
2762 .collect::<Vec<_>>(),
2763 ["the message".to_string()]
2764 );
2765 });
2766 project.update(cx, |project, cx| {
2767 assert_eq!(
2768 project.diagnostic_summary(false, cx),
2769 DiagnosticSummary {
2770 error_count: 1,
2771 warning_count: 0,
2772 }
2773 );
2774 });
2775
2776 project.update(cx, |project, cx| {
2777 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2778 });
2779
2780 // The diagnostics are cleared.
2781 cx.executor().run_until_parked();
2782 buffer.update(cx, |buffer, _| {
2783 assert_eq!(
2784 buffer
2785 .snapshot()
2786 .diagnostics_in_range::<_, usize>(0..1, false)
2787 .map(|entry| entry.diagnostic.message.clone())
2788 .collect::<Vec<_>>(),
2789 Vec::<String>::new(),
2790 );
2791 });
2792 project.update(cx, |project, cx| {
2793 assert_eq!(
2794 project.diagnostic_summary(false, cx),
2795 DiagnosticSummary {
2796 error_count: 0,
2797 warning_count: 0,
2798 }
2799 );
2800 });
2801}
2802
2803#[gpui::test]
2804async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2805 init_test(cx);
2806
2807 let fs = FakeFs::new(cx.executor());
2808 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2809
2810 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2811 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2812
2813 language_registry.add(rust_lang());
2814 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2815
2816 let (buffer, _handle) = project
2817 .update(cx, |project, cx| {
2818 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2819 })
2820 .await
2821 .unwrap();
2822
2823 // Before restarting the server, report diagnostics with an unknown buffer version.
2824 let fake_server = fake_servers.next().await.unwrap();
2825 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2826 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2827 version: Some(10000),
2828 diagnostics: Vec::new(),
2829 });
2830 cx.executor().run_until_parked();
2831 project.update(cx, |project, cx| {
2832 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2833 });
2834
2835 let mut fake_server = fake_servers.next().await.unwrap();
2836 let notification = fake_server
2837 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2838 .await
2839 .text_document;
2840 assert_eq!(notification.version, 0);
2841}
2842
// Starts two progress tokens on the fake server — one non-cancellable and one
// cancellable disk-based token — then cancels language-server work for the
// buffer and expects a `WorkDoneProgressCancel` only for the cancellable,
// disk-based token.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token is explicitly non-cancellable; it must not be cancelled below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token is the cancellable disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token should produce a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2912
// Toggles `enable_language_server` per language in the user settings and
// verifies only the targeted server is affected: disabling Rust stops only the
// Rust server; re-enabling Rust while disabling JavaScript restarts the Rust
// server and stops the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3030
3031#[gpui::test(iterations = 3)]
3032async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3033 init_test(cx);
3034
3035 let text = "
3036 fn a() { A }
3037 fn b() { BB }
3038 fn c() { CCC }
3039 "
3040 .unindent();
3041
3042 let fs = FakeFs::new(cx.executor());
3043 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3044
3045 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3046 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3047
3048 language_registry.add(rust_lang());
3049 let mut fake_servers = language_registry.register_fake_lsp(
3050 "Rust",
3051 FakeLspAdapter {
3052 disk_based_diagnostics_sources: vec!["disk".into()],
3053 ..Default::default()
3054 },
3055 );
3056
3057 let buffer = project
3058 .update(cx, |project, cx| {
3059 project.open_local_buffer(path!("/dir/a.rs"), cx)
3060 })
3061 .await
3062 .unwrap();
3063
3064 let _handle = project.update(cx, |project, cx| {
3065 project.register_buffer_with_language_servers(&buffer, cx)
3066 });
3067
3068 let mut fake_server = fake_servers.next().await.unwrap();
3069 let open_notification = fake_server
3070 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3071 .await;
3072
3073 // Edit the buffer, moving the content down
3074 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3075 let change_notification_1 = fake_server
3076 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3077 .await;
3078 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3079
3080 // Report some diagnostics for the initial version of the buffer
3081 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3082 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3083 version: Some(open_notification.text_document.version),
3084 diagnostics: vec![
3085 lsp::Diagnostic {
3086 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3087 severity: Some(DiagnosticSeverity::ERROR),
3088 message: "undefined variable 'A'".to_string(),
3089 source: Some("disk".to_string()),
3090 ..Default::default()
3091 },
3092 lsp::Diagnostic {
3093 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3094 severity: Some(DiagnosticSeverity::ERROR),
3095 message: "undefined variable 'BB'".to_string(),
3096 source: Some("disk".to_string()),
3097 ..Default::default()
3098 },
3099 lsp::Diagnostic {
3100 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3101 severity: Some(DiagnosticSeverity::ERROR),
3102 source: Some("disk".to_string()),
3103 message: "undefined variable 'CCC'".to_string(),
3104 ..Default::default()
3105 },
3106 ],
3107 });
3108
3109 // The diagnostics have moved down since they were created.
3110 cx.executor().run_until_parked();
3111 buffer.update(cx, |buffer, _| {
3112 assert_eq!(
3113 buffer
3114 .snapshot()
3115 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3116 .collect::<Vec<_>>(),
3117 &[
3118 DiagnosticEntry {
3119 range: Point::new(3, 9)..Point::new(3, 11),
3120 diagnostic: Diagnostic {
3121 source: Some("disk".into()),
3122 severity: DiagnosticSeverity::ERROR,
3123 message: "undefined variable 'BB'".to_string(),
3124 is_disk_based: true,
3125 group_id: 1,
3126 is_primary: true,
3127 source_kind: DiagnosticSourceKind::Pushed,
3128 ..Diagnostic::default()
3129 },
3130 },
3131 DiagnosticEntry {
3132 range: Point::new(4, 9)..Point::new(4, 12),
3133 diagnostic: Diagnostic {
3134 source: Some("disk".into()),
3135 severity: DiagnosticSeverity::ERROR,
3136 message: "undefined variable 'CCC'".to_string(),
3137 is_disk_based: true,
3138 group_id: 2,
3139 is_primary: true,
3140 source_kind: DiagnosticSourceKind::Pushed,
3141 ..Diagnostic::default()
3142 }
3143 }
3144 ]
3145 );
3146 assert_eq!(
3147 chunks_with_diagnostics(buffer, 0..buffer.len()),
3148 [
3149 ("\n\nfn a() { ".to_string(), None),
3150 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3151 (" }\nfn b() { ".to_string(), None),
3152 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3153 (" }\nfn c() { ".to_string(), None),
3154 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3155 (" }\n".to_string(), None),
3156 ]
3157 );
3158 assert_eq!(
3159 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3160 [
3161 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3162 (" }\nfn c() { ".to_string(), None),
3163 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3164 ]
3165 );
3166 });
3167
3168 // Ensure overlapping diagnostics are highlighted correctly.
3169 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3170 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3171 version: Some(open_notification.text_document.version),
3172 diagnostics: vec![
3173 lsp::Diagnostic {
3174 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3175 severity: Some(DiagnosticSeverity::ERROR),
3176 message: "undefined variable 'A'".to_string(),
3177 source: Some("disk".to_string()),
3178 ..Default::default()
3179 },
3180 lsp::Diagnostic {
3181 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3182 severity: Some(DiagnosticSeverity::WARNING),
3183 message: "unreachable statement".to_string(),
3184 source: Some("disk".to_string()),
3185 ..Default::default()
3186 },
3187 ],
3188 });
3189
3190 cx.executor().run_until_parked();
3191 buffer.update(cx, |buffer, _| {
3192 assert_eq!(
3193 buffer
3194 .snapshot()
3195 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3196 .collect::<Vec<_>>(),
3197 &[
3198 DiagnosticEntry {
3199 range: Point::new(2, 9)..Point::new(2, 12),
3200 diagnostic: Diagnostic {
3201 source: Some("disk".into()),
3202 severity: DiagnosticSeverity::WARNING,
3203 message: "unreachable statement".to_string(),
3204 is_disk_based: true,
3205 group_id: 4,
3206 is_primary: true,
3207 source_kind: DiagnosticSourceKind::Pushed,
3208 ..Diagnostic::default()
3209 }
3210 },
3211 DiagnosticEntry {
3212 range: Point::new(2, 9)..Point::new(2, 10),
3213 diagnostic: Diagnostic {
3214 source: Some("disk".into()),
3215 severity: DiagnosticSeverity::ERROR,
3216 message: "undefined variable 'A'".to_string(),
3217 is_disk_based: true,
3218 group_id: 3,
3219 is_primary: true,
3220 source_kind: DiagnosticSourceKind::Pushed,
3221 ..Diagnostic::default()
3222 },
3223 }
3224 ]
3225 );
3226 assert_eq!(
3227 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3228 [
3229 ("fn a() { ".to_string(), None),
3230 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3231 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3232 ("\n".to_string(), None),
3233 ]
3234 );
3235 assert_eq!(
3236 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3237 [
3238 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3239 ("\n".to_string(), None),
3240 ]
3241 );
3242 });
3243
3244 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3245 // changes since the last save.
3246 buffer.update(cx, |buffer, cx| {
3247 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3248 buffer.edit(
3249 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3250 None,
3251 cx,
3252 );
3253 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3254 });
3255 let change_notification_2 = fake_server
3256 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3257 .await;
3258 assert!(
3259 change_notification_2.text_document.version > change_notification_1.text_document.version
3260 );
3261
3262 // Handle out-of-order diagnostics
3263 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3264 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3265 version: Some(change_notification_2.text_document.version),
3266 diagnostics: vec![
3267 lsp::Diagnostic {
3268 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3269 severity: Some(DiagnosticSeverity::ERROR),
3270 message: "undefined variable 'BB'".to_string(),
3271 source: Some("disk".to_string()),
3272 ..Default::default()
3273 },
3274 lsp::Diagnostic {
3275 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3276 severity: Some(DiagnosticSeverity::WARNING),
3277 message: "undefined variable 'A'".to_string(),
3278 source: Some("disk".to_string()),
3279 ..Default::default()
3280 },
3281 ],
3282 });
3283
3284 cx.executor().run_until_parked();
3285 buffer.update(cx, |buffer, _| {
3286 assert_eq!(
3287 buffer
3288 .snapshot()
3289 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3290 .collect::<Vec<_>>(),
3291 &[
3292 DiagnosticEntry {
3293 range: Point::new(2, 21)..Point::new(2, 22),
3294 diagnostic: Diagnostic {
3295 source: Some("disk".into()),
3296 severity: DiagnosticSeverity::WARNING,
3297 message: "undefined variable 'A'".to_string(),
3298 is_disk_based: true,
3299 group_id: 6,
3300 is_primary: true,
3301 source_kind: DiagnosticSourceKind::Pushed,
3302 ..Diagnostic::default()
3303 }
3304 },
3305 DiagnosticEntry {
3306 range: Point::new(3, 9)..Point::new(3, 14),
3307 diagnostic: Diagnostic {
3308 source: Some("disk".into()),
3309 severity: DiagnosticSeverity::ERROR,
3310 message: "undefined variable 'BB'".to_string(),
3311 is_disk_based: true,
3312 group_id: 5,
3313 is_primary: true,
3314 source_kind: DiagnosticSourceKind::Pushed,
3315 ..Diagnostic::default()
3316 },
3317 }
3318 ]
3319 );
3320 });
3321}
3322
3323#[gpui::test]
3324async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3325 init_test(cx);
3326
3327 let text = concat!(
3328 "let one = ;\n", //
3329 "let two = \n",
3330 "let three = 3;\n",
3331 );
3332
3333 let fs = FakeFs::new(cx.executor());
3334 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3335
3336 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3337 let buffer = project
3338 .update(cx, |project, cx| {
3339 project.open_local_buffer(path!("/dir/a.rs"), cx)
3340 })
3341 .await
3342 .unwrap();
3343
3344 project.update(cx, |project, cx| {
3345 project.lsp_store.update(cx, |lsp_store, cx| {
3346 lsp_store
3347 .update_diagnostic_entries(
3348 LanguageServerId(0),
3349 PathBuf::from(path!("/dir/a.rs")),
3350 None,
3351 None,
3352 vec![
3353 DiagnosticEntry {
3354 range: Unclipped(PointUtf16::new(0, 10))
3355 ..Unclipped(PointUtf16::new(0, 10)),
3356 diagnostic: Diagnostic {
3357 severity: DiagnosticSeverity::ERROR,
3358 message: "syntax error 1".to_string(),
3359 source_kind: DiagnosticSourceKind::Pushed,
3360 ..Diagnostic::default()
3361 },
3362 },
3363 DiagnosticEntry {
3364 range: Unclipped(PointUtf16::new(1, 10))
3365 ..Unclipped(PointUtf16::new(1, 10)),
3366 diagnostic: Diagnostic {
3367 severity: DiagnosticSeverity::ERROR,
3368 message: "syntax error 2".to_string(),
3369 source_kind: DiagnosticSourceKind::Pushed,
3370 ..Diagnostic::default()
3371 },
3372 },
3373 ],
3374 cx,
3375 )
3376 .unwrap();
3377 })
3378 });
3379
3380 // An empty range is extended forward to include the following character.
3381 // At the end of a line, an empty range is extended backward to include
3382 // the preceding character.
3383 buffer.update(cx, |buffer, _| {
3384 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3385 assert_eq!(
3386 chunks
3387 .iter()
3388 .map(|(s, d)| (s.as_str(), *d))
3389 .collect::<Vec<_>>(),
3390 &[
3391 ("let one = ", None),
3392 (";", Some(DiagnosticSeverity::ERROR)),
3393 ("\nlet two =", None),
3394 (" ", Some(DiagnosticSeverity::ERROR)),
3395 ("\nlet three = 3;\n", None)
3396 ]
3397 );
3398 });
3399}
3400
3401#[gpui::test]
3402async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3403 init_test(cx);
3404
3405 let fs = FakeFs::new(cx.executor());
3406 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3407 .await;
3408
3409 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3410 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
3411
3412 lsp_store.update(cx, |lsp_store, cx| {
3413 lsp_store
3414 .update_diagnostic_entries(
3415 LanguageServerId(0),
3416 Path::new(path!("/dir/a.rs")).to_owned(),
3417 None,
3418 None,
3419 vec![DiagnosticEntry {
3420 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3421 diagnostic: Diagnostic {
3422 severity: DiagnosticSeverity::ERROR,
3423 is_primary: true,
3424 message: "syntax error a1".to_string(),
3425 source_kind: DiagnosticSourceKind::Pushed,
3426 ..Diagnostic::default()
3427 },
3428 }],
3429 cx,
3430 )
3431 .unwrap();
3432 lsp_store
3433 .update_diagnostic_entries(
3434 LanguageServerId(1),
3435 Path::new(path!("/dir/a.rs")).to_owned(),
3436 None,
3437 None,
3438 vec![DiagnosticEntry {
3439 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3440 diagnostic: Diagnostic {
3441 severity: DiagnosticSeverity::ERROR,
3442 is_primary: true,
3443 message: "syntax error b1".to_string(),
3444 source_kind: DiagnosticSourceKind::Pushed,
3445 ..Diagnostic::default()
3446 },
3447 }],
3448 cx,
3449 )
3450 .unwrap();
3451
3452 assert_eq!(
3453 lsp_store.diagnostic_summary(false, cx),
3454 DiagnosticSummary {
3455 error_count: 2,
3456 warning_count: 0,
3457 }
3458 );
3459 });
3460}
3461
// Verifies that `edits_from_lsp` correctly rebases edits that a language
// server computed against an OLD document version: the buffer is edited
// after the server's `didOpen` snapshot, and the LSP edits (tagged with the
// stale version) must still land in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open; the edits below
    // are sent against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // Resolve LSP edits expressed in coordinates of the ORIGINAL document
    // version (note `Some(lsp_document_version)` below).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves the user's interleaved edits while
    // realizing the server's intended changes (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
3616
// Verifies that `edits_from_lsp` minimizes a sprawling server-provided diff
// (replace + reinsert + delete spanning most of the file) down to the small
// set of edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse into exactly two minimal edits:
        // the import rewrite and a one-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3727
// Verifies that `edits_from_lsp` tolerates a spec-violating edit ordering:
// an insertion that comes AFTER a replacement at the same start position.
// Both edits must still be applied, with the insertion landing first.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // No-op replacement of "Path" with "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Insertion at the same start position, listed second.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Apply all resolved edits simultaneously.
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3783
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unsorted
// edits, an inverted range (end before start), and a range whose end is past
// the end of the document all get normalized into valid, minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds end position: line 99 does not exist.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed input is normalized to two well-formed, sorted edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3890
3891fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3892 buffer: &Buffer,
3893 range: Range<T>,
3894) -> Vec<(String, Option<DiagnosticSeverity>)> {
3895 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3896 for chunk in buffer.snapshot().chunks(range, true) {
3897 if chunks
3898 .last()
3899 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3900 {
3901 chunks.last_mut().unwrap().0.push_str(chunk.text);
3902 } else {
3903 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3904 }
3905 }
3906 chunks
3907}
3908
// Verifies go-to-definition into a file outside the project: the target file
// is added as an invisible worktree for as long as the definition is alive,
// and that worktree is removed once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server reports the definition of `a()` as residing in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible (false) worktree alongside the
        // original visible (true) b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for each of the project's worktrees.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4009
// Verifies completion-item resolution precedence: when a completion item
// carries an explicit `text_edit`, that edit's text and range win over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The text_edit replaces the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4093
// Verifies completion resolution when items omit `text_edit` but the
// response supplies a default `edit_range` via `itemDefaults`:
// `text_edit_text` is used when present, otherwise the `label` is inserted.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // The default edit_range covers the trailing "fqn".
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is inserted over the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // insert_text is expected to be ignored when a
                        // default edit_range is present.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no edit text at all, the label is inserted over the default range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4231
// Verifies completion resolution when neither `text_edit` nor a default
// `edit_range` is provided: the replaced range is inferred from the word
// around the cursor, and `insert_text` (or failing that, `label`) is used.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label; the inferred range is the "fqn" word.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is inserted over the inferred word "cmp" before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4338
// Verifies that carriage returns in a completion's insert_text ("\r" and
// "\r\n") are normalized to plain "\n" line endings in the resolved
// completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // insert_text mixes a bare "\r" and a "\r\n".
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both line-ending forms are normalized to "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4407
// End-to-end flow for a code action that carries a command instead of edits:
// resolving the action yields a command, executing the command causes the
// server to send a `workspace/applyEdit` request back to the editor, and the
// edits from that request become the applied project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises code-action resolution plus one executable
    // command, matching the shape of the actions it returns below.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying the `data` payload that the
    // resolve handler below turns into a command.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the editor to insert "X" at the start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4550
// Renaming a file to a path whose parent directories don't exist yet must
// create the whole directory hierarchy and preserve the file's contents;
// a second rename into an already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a directory hierarchy that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the moved file's entry so it can be renamed a second time.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Move the file again, this time into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4658
4659#[gpui::test(iterations = 10)]
4660async fn test_save_file(cx: &mut gpui::TestAppContext) {
4661 init_test(cx);
4662
4663 let fs = FakeFs::new(cx.executor());
4664 fs.insert_tree(
4665 path!("/dir"),
4666 json!({
4667 "file1": "the old contents",
4668 }),
4669 )
4670 .await;
4671
4672 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4673 let buffer = project
4674 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4675 .await
4676 .unwrap();
4677 buffer.update(cx, |buffer, cx| {
4678 assert_eq!(buffer.text(), "the old contents");
4679 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4680 });
4681
4682 project
4683 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4684 .await
4685 .unwrap();
4686
4687 let new_text = fs
4688 .load(Path::new(path!("/dir/file1")))
4689 .await
4690 .unwrap()
4691 .replace("\r\n", "\n");
4692 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4693}
4694
// Regression test: saving an untitled buffer under a file name that matches
// a registered language must start that language's server and notify it
// about the newly saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file name there is no language, so
    // no server should be associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a Rust file name.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // Now that the buffer has a Rust file, the server applies to it.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4774
4775#[gpui::test(iterations = 30)]
4776async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4777 init_test(cx);
4778
4779 let fs = FakeFs::new(cx.executor());
4780 fs.insert_tree(
4781 path!("/dir"),
4782 json!({
4783 "file1": "the original contents",
4784 }),
4785 )
4786 .await;
4787
4788 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4789 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4790 let buffer = project
4791 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4792 .await
4793 .unwrap();
4794
4795 // Change the buffer's file on disk, and then wait for the file change
4796 // to be detected by the worktree, so that the buffer starts reloading.
4797 fs.save(
4798 path!("/dir/file1").as_ref(),
4799 &"the first contents".into(),
4800 Default::default(),
4801 )
4802 .await
4803 .unwrap();
4804 worktree.next_event(cx).await;
4805
4806 // Change the buffer's file again. Depending on the random seed, the
4807 // previous file change may still be in progress.
4808 fs.save(
4809 path!("/dir/file1").as_ref(),
4810 &"the second contents".into(),
4811 Default::default(),
4812 )
4813 .await
4814 .unwrap();
4815 worktree.next_event(cx).await;
4816
4817 cx.executor().run_until_parked();
4818 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4819 buffer.read_with(cx, |buffer, _| {
4820 assert_eq!(buffer.text(), on_disk_text);
4821 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4822 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4823 });
4824}
4825
4826#[gpui::test(iterations = 30)]
4827async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4828 init_test(cx);
4829
4830 let fs = FakeFs::new(cx.executor());
4831 fs.insert_tree(
4832 path!("/dir"),
4833 json!({
4834 "file1": "the original contents",
4835 }),
4836 )
4837 .await;
4838
4839 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4840 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4841 let buffer = project
4842 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4843 .await
4844 .unwrap();
4845
4846 // Change the buffer's file on disk, and then wait for the file change
4847 // to be detected by the worktree, so that the buffer starts reloading.
4848 fs.save(
4849 path!("/dir/file1").as_ref(),
4850 &"the first contents".into(),
4851 Default::default(),
4852 )
4853 .await
4854 .unwrap();
4855 worktree.next_event(cx).await;
4856
4857 cx.executor()
4858 .spawn(cx.executor().simulate_random_delay())
4859 .await;
4860
4861 // Perform a noop edit, causing the buffer's version to increase.
4862 buffer.update(cx, |buffer, cx| {
4863 buffer.edit([(0..0, " ")], None, cx);
4864 buffer.undo(cx);
4865 });
4866
4867 cx.executor().run_until_parked();
4868 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4869 buffer.read_with(cx, |buffer, _| {
4870 let buffer_text = buffer.text();
4871 if buffer_text == on_disk_text {
4872 assert!(
4873 !buffer.is_dirty() && !buffer.has_conflict(),
4874 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4875 );
4876 }
4877 // If the file change occurred while the buffer was processing the first
4878 // change, the buffer will be in a conflicting state.
4879 else {
4880 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4881 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4882 }
4883 });
4884}
4885
4886#[gpui::test]
4887async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4888 init_test(cx);
4889
4890 let fs = FakeFs::new(cx.executor());
4891 fs.insert_tree(
4892 path!("/dir"),
4893 json!({
4894 "file1": "the old contents",
4895 }),
4896 )
4897 .await;
4898
4899 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4900 let buffer = project
4901 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4902 .await
4903 .unwrap();
4904 buffer.update(cx, |buffer, cx| {
4905 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4906 });
4907
4908 project
4909 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4910 .await
4911 .unwrap();
4912
4913 let new_text = fs
4914 .load(Path::new(path!("/dir/file1")))
4915 .await
4916 .unwrap()
4917 .replace("\r\n", "\n");
4918 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4919}
4920
4921#[gpui::test]
4922async fn test_save_as(cx: &mut gpui::TestAppContext) {
4923 init_test(cx);
4924
4925 let fs = FakeFs::new(cx.executor());
4926 fs.insert_tree("/dir", json!({})).await;
4927
4928 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4929
4930 let languages = project.update(cx, |project, _| project.languages().clone());
4931 languages.add(rust_lang());
4932
4933 let buffer = project.update(cx, |project, cx| {
4934 project.create_local_buffer("", None, false, cx)
4935 });
4936 buffer.update(cx, |buffer, cx| {
4937 buffer.edit([(0..0, "abc")], None, cx);
4938 assert!(buffer.is_dirty());
4939 assert!(!buffer.has_conflict());
4940 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4941 });
4942 project
4943 .update(cx, |project, cx| {
4944 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4945 let path = ProjectPath {
4946 worktree_id,
4947 path: rel_path("file1.rs").into(),
4948 };
4949 project.save_buffer_as(buffer.clone(), path, cx)
4950 })
4951 .await
4952 .unwrap();
4953 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4954
4955 cx.executor().run_until_parked();
4956 buffer.update(cx, |buffer, cx| {
4957 assert_eq!(
4958 buffer.file().unwrap().full_path(cx),
4959 Path::new("dir/file1.rs")
4960 );
4961 assert!(!buffer.is_dirty());
4962 assert!(!buffer.has_conflict());
4963 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4964 });
4965
4966 let opened_buffer = project
4967 .update(cx, |project, cx| {
4968 project.open_local_buffer("/dir/file1.rs", cx)
4969 })
4970 .await
4971 .unwrap();
4972 assert_eq!(opened_buffer, buffer);
4973}
4974
4975#[gpui::test]
4976async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4977 init_test(cx);
4978
4979 let fs = FakeFs::new(cx.executor());
4980 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4981
4982 fs.insert_tree(
4983 path!("/dir"),
4984 json!({
4985 "data_a.txt": "data about a"
4986 }),
4987 )
4988 .await;
4989
4990 let buffer = project
4991 .update(cx, |project, cx| {
4992 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4993 })
4994 .await
4995 .unwrap();
4996
4997 buffer.update(cx, |buffer, cx| {
4998 buffer.edit([(11..12, "b")], None, cx);
4999 });
5000
5001 // Save buffer's contents as a new file and confirm that the buffer's now
5002 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5003 // file associated with the buffer has now been updated to `data_b.txt`
5004 project
5005 .update(cx, |project, cx| {
5006 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5007 let new_path = ProjectPath {
5008 worktree_id,
5009 path: rel_path("data_b.txt").into(),
5010 };
5011
5012 project.save_buffer_as(buffer.clone(), new_path, cx)
5013 })
5014 .await
5015 .unwrap();
5016
5017 buffer.update(cx, |buffer, cx| {
5018 assert_eq!(
5019 buffer.file().unwrap().full_path(cx),
5020 Path::new("dir/data_b.txt")
5021 )
5022 });
5023
5024 // Open the original `data_a.txt` file, confirming that its contents are
5025 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5026 let original_buffer = project
5027 .update(cx, |project, cx| {
5028 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5029 })
5030 .await
5031 .unwrap();
5032
5033 original_buffer.update(cx, |buffer, cx| {
5034 assert_eq!(buffer.text(), "data about a");
5035 assert_eq!(
5036 buffer.file().unwrap().full_path(cx),
5037 Path::new("dir/data_a.txt")
5038 )
5039 });
5040}
5041
// Exercises rescanning with a real filesystem: files and directories are
// renamed and deleted on disk, and both the local worktree and a remote
// replica (fed from observed updates) must converge on the new state, while
// open buffers keep tracking their files across the renames.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Remember the entry ids before the renames; they should stay stable.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update broadcast by the local worktree so it can be
    // replayed on the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5209
// Renaming a parent directory must preserve the entry ids of the directory
// and the files inside it, and buffers open on those files must stay clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture the pre-rename entry ids for the directory and the file.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both entries keep their ids under the new paths.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5263
5264#[gpui::test]
5265async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5266 init_test(cx);
5267
5268 let fs = FakeFs::new(cx.executor());
5269 fs.insert_tree(
5270 "/dir",
5271 json!({
5272 "a.txt": "a-contents",
5273 "b.txt": "b-contents",
5274 }),
5275 )
5276 .await;
5277
5278 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5279
5280 // Spawn multiple tasks to open paths, repeating some paths.
5281 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5282 (
5283 p.open_local_buffer("/dir/a.txt", cx),
5284 p.open_local_buffer("/dir/b.txt", cx),
5285 p.open_local_buffer("/dir/a.txt", cx),
5286 )
5287 });
5288
5289 let buffer_a_1 = buffer_a_1.await.unwrap();
5290 let buffer_a_2 = buffer_a_2.await.unwrap();
5291 let buffer_b = buffer_b.await.unwrap();
5292 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5293 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5294
5295 // There is only one buffer per path.
5296 let buffer_a_id = buffer_a_1.entity_id();
5297 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5298
5299 // Open the same path again while it is still open.
5300 drop(buffer_a_1);
5301 let buffer_a_3 = project
5302 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5303 .await
5304 .unwrap();
5305
5306 // There's still only one buffer per path.
5307 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5308}
5309
5310#[gpui::test]
5311async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5312 init_test(cx);
5313
5314 let fs = FakeFs::new(cx.executor());
5315 fs.insert_tree(
5316 path!("/dir"),
5317 json!({
5318 "file1": "abc",
5319 "file2": "def",
5320 "file3": "ghi",
5321 }),
5322 )
5323 .await;
5324
5325 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5326
5327 let buffer1 = project
5328 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5329 .await
5330 .unwrap();
5331 let events = Arc::new(Mutex::new(Vec::new()));
5332
5333 // initially, the buffer isn't dirty.
5334 buffer1.update(cx, |buffer, cx| {
5335 cx.subscribe(&buffer1, {
5336 let events = events.clone();
5337 move |_, _, event, _| match event {
5338 BufferEvent::Operation { .. } => {}
5339 _ => events.lock().push(event.clone()),
5340 }
5341 })
5342 .detach();
5343
5344 assert!(!buffer.is_dirty());
5345 assert!(events.lock().is_empty());
5346
5347 buffer.edit([(1..2, "")], None, cx);
5348 });
5349
5350 // after the first edit, the buffer is dirty, and emits a dirtied event.
5351 buffer1.update(cx, |buffer, cx| {
5352 assert!(buffer.text() == "ac");
5353 assert!(buffer.is_dirty());
5354 assert_eq!(
5355 *events.lock(),
5356 &[
5357 language::BufferEvent::Edited,
5358 language::BufferEvent::DirtyChanged
5359 ]
5360 );
5361 events.lock().clear();
5362 buffer.did_save(
5363 buffer.version(),
5364 buffer.file().unwrap().disk_state().mtime(),
5365 cx,
5366 );
5367 });
5368
5369 // after saving, the buffer is not dirty, and emits a saved event.
5370 buffer1.update(cx, |buffer, cx| {
5371 assert!(!buffer.is_dirty());
5372 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5373 events.lock().clear();
5374
5375 buffer.edit([(1..1, "B")], None, cx);
5376 buffer.edit([(2..2, "D")], None, cx);
5377 });
5378
5379 // after editing again, the buffer is dirty, and emits another dirty event.
5380 buffer1.update(cx, |buffer, cx| {
5381 assert!(buffer.text() == "aBDc");
5382 assert!(buffer.is_dirty());
5383 assert_eq!(
5384 *events.lock(),
5385 &[
5386 language::BufferEvent::Edited,
5387 language::BufferEvent::DirtyChanged,
5388 language::BufferEvent::Edited,
5389 ],
5390 );
5391 events.lock().clear();
5392
5393 // After restoring the buffer to its previously-saved state,
5394 // the buffer is not considered dirty anymore.
5395 buffer.edit([(1..3, "")], None, cx);
5396 assert!(buffer.text() == "ac");
5397 assert!(!buffer.is_dirty());
5398 });
5399
5400 assert_eq!(
5401 *events.lock(),
5402 &[
5403 language::BufferEvent::Edited,
5404 language::BufferEvent::DirtyChanged
5405 ]
5406 );
5407
5408 // When a file is deleted, it is not considered dirty.
5409 let events = Arc::new(Mutex::new(Vec::new()));
5410 let buffer2 = project
5411 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5412 .await
5413 .unwrap();
5414 buffer2.update(cx, |_, cx| {
5415 cx.subscribe(&buffer2, {
5416 let events = events.clone();
5417 move |_, _, event, _| match event {
5418 BufferEvent::Operation { .. } => {}
5419 _ => events.lock().push(event.clone()),
5420 }
5421 })
5422 .detach();
5423 });
5424
5425 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5426 .await
5427 .unwrap();
5428 cx.executor().run_until_parked();
5429 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5430 assert_eq!(
5431 mem::take(&mut *events.lock()),
5432 &[language::BufferEvent::FileHandleChanged]
5433 );
5434
5435 // Buffer becomes dirty when edited.
5436 buffer2.update(cx, |buffer, cx| {
5437 buffer.edit([(2..3, "")], None, cx);
5438 assert_eq!(buffer.is_dirty(), true);
5439 });
5440 assert_eq!(
5441 mem::take(&mut *events.lock()),
5442 &[
5443 language::BufferEvent::Edited,
5444 language::BufferEvent::DirtyChanged
5445 ]
5446 );
5447
5448 // Buffer becomes clean again when all of its content is removed, because
5449 // the file was deleted.
5450 buffer2.update(cx, |buffer, cx| {
5451 buffer.edit([(0..2, "")], None, cx);
5452 assert_eq!(buffer.is_empty(), true);
5453 assert_eq!(buffer.is_dirty(), false);
5454 });
5455 assert_eq!(
5456 *events.lock(),
5457 &[
5458 language::BufferEvent::Edited,
5459 language::BufferEvent::DirtyChanged
5460 ]
5461 );
5462
5463 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5464 let events = Arc::new(Mutex::new(Vec::new()));
5465 let buffer3 = project
5466 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5467 .await
5468 .unwrap();
5469 buffer3.update(cx, |_, cx| {
5470 cx.subscribe(&buffer3, {
5471 let events = events.clone();
5472 move |_, _, event, _| match event {
5473 BufferEvent::Operation { .. } => {}
5474 _ => events.lock().push(event.clone()),
5475 }
5476 })
5477 .detach();
5478 });
5479
5480 buffer3.update(cx, |buffer, cx| {
5481 buffer.edit([(0..0, "x")], None, cx);
5482 });
5483 events.lock().clear();
5484 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5485 .await
5486 .unwrap();
5487 cx.executor().run_until_parked();
5488 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5489 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5490}
5491
// Covers both reload paths when a buffer's file changes on disk: a clean
// buffer reloads in place (with anchors translated through the diff), while
// a dirty buffer keeps its contents and is marked as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at the marked offsets so we can verify they survive the
    // reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5574
5575#[gpui::test]
5576async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5577 init_test(cx);
5578
5579 let fs = FakeFs::new(cx.executor());
5580 fs.insert_tree(
5581 path!("/dir"),
5582 json!({
5583 "file1": "a\nb\nc\n",
5584 "file2": "one\r\ntwo\r\nthree\r\n",
5585 }),
5586 )
5587 .await;
5588
5589 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5590 let buffer1 = project
5591 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5592 .await
5593 .unwrap();
5594 let buffer2 = project
5595 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5596 .await
5597 .unwrap();
5598
5599 buffer1.update(cx, |buffer, _| {
5600 assert_eq!(buffer.text(), "a\nb\nc\n");
5601 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5602 });
5603 buffer2.update(cx, |buffer, _| {
5604 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5605 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5606 });
5607
5608 // Change a file's line endings on disk from unix to windows. The buffer's
5609 // state updates correctly.
5610 fs.save(
5611 path!("/dir/file1").as_ref(),
5612 &"aaa\nb\nc\n".into(),
5613 LineEnding::Windows,
5614 )
5615 .await
5616 .unwrap();
5617 cx.executor().run_until_parked();
5618 buffer1.update(cx, |buffer, _| {
5619 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5620 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5621 });
5622
5623 // Save a file with windows line endings. The file is written correctly.
5624 buffer2.update(cx, |buffer, cx| {
5625 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5626 });
5627 project
5628 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5629 .await
5630 .unwrap();
5631 assert_eq!(
5632 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5633 "one\r\ntwo\r\nthree\r\nfour\r\n",
5634 );
5635}
5636
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics whose `related_information` entries
    // cross-reference one another are grouped: each group gets one primary
    // entry plus its HINT-severity companions, all sharing a `group_id`, and
    // `diagnostic_group` returns a group's entries in buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical problems, published rust-analyzer-style: each primary
    // diagnostic lists its hints in `related_information`, and each hint
    // points back at the primary ("original diagnostic"). These links are
    // what the grouping logic follows.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group 1: a warning with a single hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to group 1; links back to its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group 0: an error with two hints at a different range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of group 0; links back to its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of group 0; also links back to its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: each entry carries its group's id and
    // whether it is the group's primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints, in buffer order
    // (hints precede the primary here because they occur earlier in the file).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5896
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-rename handshake with a language server that
    // registers `workspace/willRenameFiles` and `workspace/didRenameFiles`
    // capabilities: renaming a worktree entry must send the will-rename
    // request first, apply the workspace edit the server returns, and then
    // emit the did-rename notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: *.rs files and all
    // folders. The rename below must match these for notifications to fire.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the returned future is not awaited until after the
    // will-rename request handler below has been installed and has run.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the fake server will answer `willRenameFiles` with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit returned by the will-rename handler so we can assert
    // at the end that the handler actually ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive `didRenameFiles`
    // describing the same old/new pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6033
6034#[gpui::test]
6035async fn test_rename(cx: &mut gpui::TestAppContext) {
6036 // hi
6037 init_test(cx);
6038
6039 let fs = FakeFs::new(cx.executor());
6040 fs.insert_tree(
6041 path!("/dir"),
6042 json!({
6043 "one.rs": "const ONE: usize = 1;",
6044 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6045 }),
6046 )
6047 .await;
6048
6049 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6050
6051 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6052 language_registry.add(rust_lang());
6053 let mut fake_servers = language_registry.register_fake_lsp(
6054 "Rust",
6055 FakeLspAdapter {
6056 capabilities: lsp::ServerCapabilities {
6057 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
6058 prepare_provider: Some(true),
6059 work_done_progress_options: Default::default(),
6060 })),
6061 ..Default::default()
6062 },
6063 ..Default::default()
6064 },
6065 );
6066
6067 let (buffer, _handle) = project
6068 .update(cx, |project, cx| {
6069 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
6070 })
6071 .await
6072 .unwrap();
6073
6074 let fake_server = fake_servers.next().await.unwrap();
6075 cx.executor().run_until_parked();
6076
6077 let response = project.update(cx, |project, cx| {
6078 project.prepare_rename(buffer.clone(), 7, cx)
6079 });
6080 fake_server
6081 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6082 assert_eq!(
6083 params.text_document.uri.as_str(),
6084 uri!("file:///dir/one.rs")
6085 );
6086 assert_eq!(params.position, lsp::Position::new(0, 7));
6087 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6088 lsp::Position::new(0, 6),
6089 lsp::Position::new(0, 9),
6090 ))))
6091 })
6092 .next()
6093 .await
6094 .unwrap();
6095 let response = response.await.unwrap();
6096 let PrepareRenameResponse::Success(range) = response else {
6097 panic!("{:?}", response);
6098 };
6099 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
6100 assert_eq!(range, 6..9);
6101
6102 let response = project.update(cx, |project, cx| {
6103 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
6104 });
6105 fake_server
6106 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
6107 assert_eq!(
6108 params.text_document_position.text_document.uri.as_str(),
6109 uri!("file:///dir/one.rs")
6110 );
6111 assert_eq!(
6112 params.text_document_position.position,
6113 lsp::Position::new(0, 7)
6114 );
6115 assert_eq!(params.new_name, "THREE");
6116 Ok(Some(lsp::WorkspaceEdit {
6117 changes: Some(
6118 [
6119 (
6120 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
6121 vec![lsp::TextEdit::new(
6122 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
6123 "THREE".to_string(),
6124 )],
6125 ),
6126 (
6127 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
6128 vec![
6129 lsp::TextEdit::new(
6130 lsp::Range::new(
6131 lsp::Position::new(0, 24),
6132 lsp::Position::new(0, 27),
6133 ),
6134 "THREE".to_string(),
6135 ),
6136 lsp::TextEdit::new(
6137 lsp::Range::new(
6138 lsp::Position::new(0, 35),
6139 lsp::Position::new(0, 38),
6140 ),
6141 "THREE".to_string(),
6142 ),
6143 ],
6144 ),
6145 ]
6146 .into_iter()
6147 .collect(),
6148 ),
6149 ..Default::default()
6150 }))
6151 })
6152 .next()
6153 .await
6154 .unwrap();
6155 let mut transaction = response.await.unwrap().0;
6156 assert_eq!(transaction.len(), 2);
6157 assert_eq!(
6158 transaction
6159 .remove_entry(&buffer)
6160 .unwrap()
6161 .0
6162 .update(cx, |buffer, _| buffer.text()),
6163 "const THREE: usize = 1;"
6164 );
6165 assert_eq!(
6166 transaction
6167 .into_keys()
6168 .next()
6169 .unwrap()
6170 .update(cx, |buffer, _| buffer.text()),
6171 "const TWO: usize = one::THREE + one::THREE;"
6172 );
6173}
6174
6175#[gpui::test]
6176async fn test_search(cx: &mut gpui::TestAppContext) {
6177 init_test(cx);
6178
6179 let fs = FakeFs::new(cx.executor());
6180 fs.insert_tree(
6181 path!("/dir"),
6182 json!({
6183 "one.rs": "const ONE: usize = 1;",
6184 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6185 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6186 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6187 }),
6188 )
6189 .await;
6190 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6191 assert_eq!(
6192 search(
6193 &project,
6194 SearchQuery::text(
6195 "TWO",
6196 false,
6197 true,
6198 false,
6199 Default::default(),
6200 Default::default(),
6201 false,
6202 None
6203 )
6204 .unwrap(),
6205 cx
6206 )
6207 .await
6208 .unwrap(),
6209 HashMap::from_iter([
6210 (path!("dir/two.rs").to_string(), vec![6..9]),
6211 (path!("dir/three.rs").to_string(), vec![37..40])
6212 ])
6213 );
6214
6215 let buffer_4 = project
6216 .update(cx, |project, cx| {
6217 project.open_local_buffer(path!("/dir/four.rs"), cx)
6218 })
6219 .await
6220 .unwrap();
6221 buffer_4.update(cx, |buffer, cx| {
6222 let text = "two::TWO";
6223 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6224 });
6225
6226 assert_eq!(
6227 search(
6228 &project,
6229 SearchQuery::text(
6230 "TWO",
6231 false,
6232 true,
6233 false,
6234 Default::default(),
6235 Default::default(),
6236 false,
6237 None,
6238 )
6239 .unwrap(),
6240 cx
6241 )
6242 .await
6243 .unwrap(),
6244 HashMap::from_iter([
6245 (path!("dir/two.rs").to_string(), vec![6..9]),
6246 (path!("dir/three.rs").to_string(), vec![37..40]),
6247 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6248 ])
6249 );
6250}
6251
6252#[gpui::test]
6253async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6254 init_test(cx);
6255
6256 let search_query = "file";
6257
6258 let fs = FakeFs::new(cx.executor());
6259 fs.insert_tree(
6260 path!("/dir"),
6261 json!({
6262 "one.rs": r#"// Rust file one"#,
6263 "one.ts": r#"// TypeScript file one"#,
6264 "two.rs": r#"// Rust file two"#,
6265 "two.ts": r#"// TypeScript file two"#,
6266 }),
6267 )
6268 .await;
6269 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6270
6271 assert!(
6272 search(
6273 &project,
6274 SearchQuery::text(
6275 search_query,
6276 false,
6277 true,
6278 false,
6279 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6280 Default::default(),
6281 false,
6282 None
6283 )
6284 .unwrap(),
6285 cx
6286 )
6287 .await
6288 .unwrap()
6289 .is_empty(),
6290 "If no inclusions match, no files should be returned"
6291 );
6292
6293 assert_eq!(
6294 search(
6295 &project,
6296 SearchQuery::text(
6297 search_query,
6298 false,
6299 true,
6300 false,
6301 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6302 Default::default(),
6303 false,
6304 None
6305 )
6306 .unwrap(),
6307 cx
6308 )
6309 .await
6310 .unwrap(),
6311 HashMap::from_iter([
6312 (path!("dir/one.rs").to_string(), vec![8..12]),
6313 (path!("dir/two.rs").to_string(), vec![8..12]),
6314 ]),
6315 "Rust only search should give only Rust files"
6316 );
6317
6318 assert_eq!(
6319 search(
6320 &project,
6321 SearchQuery::text(
6322 search_query,
6323 false,
6324 true,
6325 false,
6326 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6327 .unwrap(),
6328 Default::default(),
6329 false,
6330 None,
6331 )
6332 .unwrap(),
6333 cx
6334 )
6335 .await
6336 .unwrap(),
6337 HashMap::from_iter([
6338 (path!("dir/one.ts").to_string(), vec![14..18]),
6339 (path!("dir/two.ts").to_string(), vec![14..18]),
6340 ]),
6341 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6342 );
6343
6344 assert_eq!(
6345 search(
6346 &project,
6347 SearchQuery::text(
6348 search_query,
6349 false,
6350 true,
6351 false,
6352 PathMatcher::new(
6353 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6354 PathStyle::local()
6355 )
6356 .unwrap(),
6357 Default::default(),
6358 false,
6359 None,
6360 )
6361 .unwrap(),
6362 cx
6363 )
6364 .await
6365 .unwrap(),
6366 HashMap::from_iter([
6367 (path!("dir/two.ts").to_string(), vec![14..18]),
6368 (path!("dir/one.rs").to_string(), vec![8..12]),
6369 (path!("dir/one.ts").to_string(), vec![14..18]),
6370 (path!("dir/two.rs").to_string(), vec![8..12]),
6371 ]),
6372 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6373 );
6374}
6375
6376#[gpui::test]
6377async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6378 init_test(cx);
6379
6380 let search_query = "file";
6381
6382 let fs = FakeFs::new(cx.executor());
6383 fs.insert_tree(
6384 path!("/dir"),
6385 json!({
6386 "one.rs": r#"// Rust file one"#,
6387 "one.ts": r#"// TypeScript file one"#,
6388 "two.rs": r#"// Rust file two"#,
6389 "two.ts": r#"// TypeScript file two"#,
6390 }),
6391 )
6392 .await;
6393 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6394
6395 assert_eq!(
6396 search(
6397 &project,
6398 SearchQuery::text(
6399 search_query,
6400 false,
6401 true,
6402 false,
6403 Default::default(),
6404 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6405 false,
6406 None,
6407 )
6408 .unwrap(),
6409 cx
6410 )
6411 .await
6412 .unwrap(),
6413 HashMap::from_iter([
6414 (path!("dir/one.rs").to_string(), vec![8..12]),
6415 (path!("dir/one.ts").to_string(), vec![14..18]),
6416 (path!("dir/two.rs").to_string(), vec![8..12]),
6417 (path!("dir/two.ts").to_string(), vec![14..18]),
6418 ]),
6419 "If no exclusions match, all files should be returned"
6420 );
6421
6422 assert_eq!(
6423 search(
6424 &project,
6425 SearchQuery::text(
6426 search_query,
6427 false,
6428 true,
6429 false,
6430 Default::default(),
6431 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6432 false,
6433 None,
6434 )
6435 .unwrap(),
6436 cx
6437 )
6438 .await
6439 .unwrap(),
6440 HashMap::from_iter([
6441 (path!("dir/one.ts").to_string(), vec![14..18]),
6442 (path!("dir/two.ts").to_string(), vec![14..18]),
6443 ]),
6444 "Rust exclusion search should give only TypeScript files"
6445 );
6446
6447 assert_eq!(
6448 search(
6449 &project,
6450 SearchQuery::text(
6451 search_query,
6452 false,
6453 true,
6454 false,
6455 Default::default(),
6456 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6457 .unwrap(),
6458 false,
6459 None,
6460 )
6461 .unwrap(),
6462 cx
6463 )
6464 .await
6465 .unwrap(),
6466 HashMap::from_iter([
6467 (path!("dir/one.rs").to_string(), vec![8..12]),
6468 (path!("dir/two.rs").to_string(), vec![8..12]),
6469 ]),
6470 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6471 );
6472
6473 assert!(
6474 search(
6475 &project,
6476 SearchQuery::text(
6477 search_query,
6478 false,
6479 true,
6480 false,
6481 Default::default(),
6482 PathMatcher::new(
6483 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6484 PathStyle::local(),
6485 )
6486 .unwrap(),
6487 false,
6488 None,
6489 )
6490 .unwrap(),
6491 cx
6492 )
6493 .await
6494 .unwrap()
6495 .is_empty(),
6496 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6497 );
6498}
6499
6500#[gpui::test]
6501async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6502 init_test(cx);
6503
6504 let search_query = "file";
6505
6506 let fs = FakeFs::new(cx.executor());
6507 fs.insert_tree(
6508 path!("/dir"),
6509 json!({
6510 "one.rs": r#"// Rust file one"#,
6511 "one.ts": r#"// TypeScript file one"#,
6512 "two.rs": r#"// Rust file two"#,
6513 "two.ts": r#"// TypeScript file two"#,
6514 }),
6515 )
6516 .await;
6517
6518 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6519 let path_style = PathStyle::local();
6520 let _buffer = project.update(cx, |project, cx| {
6521 project.create_local_buffer("file", None, false, cx)
6522 });
6523
6524 assert_eq!(
6525 search(
6526 &project,
6527 SearchQuery::text(
6528 search_query,
6529 false,
6530 true,
6531 false,
6532 Default::default(),
6533 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6534 false,
6535 None,
6536 )
6537 .unwrap(),
6538 cx
6539 )
6540 .await
6541 .unwrap(),
6542 HashMap::from_iter([
6543 (path!("dir/one.rs").to_string(), vec![8..12]),
6544 (path!("dir/one.ts").to_string(), vec![14..18]),
6545 (path!("dir/two.rs").to_string(), vec![8..12]),
6546 (path!("dir/two.ts").to_string(), vec![14..18]),
6547 ]),
6548 "If no exclusions match, all files should be returned"
6549 );
6550
6551 assert_eq!(
6552 search(
6553 &project,
6554 SearchQuery::text(
6555 search_query,
6556 false,
6557 true,
6558 false,
6559 Default::default(),
6560 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6561 false,
6562 None,
6563 )
6564 .unwrap(),
6565 cx
6566 )
6567 .await
6568 .unwrap(),
6569 HashMap::from_iter([
6570 (path!("dir/one.ts").to_string(), vec![14..18]),
6571 (path!("dir/two.ts").to_string(), vec![14..18]),
6572 ]),
6573 "Rust exclusion search should give only TypeScript files"
6574 );
6575
6576 assert_eq!(
6577 search(
6578 &project,
6579 SearchQuery::text(
6580 search_query,
6581 false,
6582 true,
6583 false,
6584 Default::default(),
6585 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6586 false,
6587 None,
6588 )
6589 .unwrap(),
6590 cx
6591 )
6592 .await
6593 .unwrap(),
6594 HashMap::from_iter([
6595 (path!("dir/one.rs").to_string(), vec![8..12]),
6596 (path!("dir/two.rs").to_string(), vec![8..12]),
6597 ]),
6598 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6599 );
6600
6601 assert!(
6602 search(
6603 &project,
6604 SearchQuery::text(
6605 search_query,
6606 false,
6607 true,
6608 false,
6609 Default::default(),
6610 PathMatcher::new(
6611 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6612 PathStyle::local(),
6613 )
6614 .unwrap(),
6615 false,
6616 None,
6617 )
6618 .unwrap(),
6619 cx
6620 )
6621 .await
6622 .unwrap()
6623 .is_empty(),
6624 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6625 );
6626}
6627
6628#[gpui::test]
6629async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6630 init_test(cx);
6631
6632 let search_query = "file";
6633
6634 let fs = FakeFs::new(cx.executor());
6635 fs.insert_tree(
6636 path!("/dir"),
6637 json!({
6638 "one.rs": r#"// Rust file one"#,
6639 "one.ts": r#"// TypeScript file one"#,
6640 "two.rs": r#"// Rust file two"#,
6641 "two.ts": r#"// TypeScript file two"#,
6642 }),
6643 )
6644 .await;
6645 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6646 assert!(
6647 search(
6648 &project,
6649 SearchQuery::text(
6650 search_query,
6651 false,
6652 true,
6653 false,
6654 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6655 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6656 false,
6657 None,
6658 )
6659 .unwrap(),
6660 cx
6661 )
6662 .await
6663 .unwrap()
6664 .is_empty(),
6665 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6666 );
6667
6668 assert!(
6669 search(
6670 &project,
6671 SearchQuery::text(
6672 search_query,
6673 false,
6674 true,
6675 false,
6676 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6677 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6678 false,
6679 None,
6680 )
6681 .unwrap(),
6682 cx
6683 )
6684 .await
6685 .unwrap()
6686 .is_empty(),
6687 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6688 );
6689
6690 assert!(
6691 search(
6692 &project,
6693 SearchQuery::text(
6694 search_query,
6695 false,
6696 true,
6697 false,
6698 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6699 .unwrap(),
6700 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6701 .unwrap(),
6702 false,
6703 None,
6704 )
6705 .unwrap(),
6706 cx
6707 )
6708 .await
6709 .unwrap()
6710 .is_empty(),
6711 "Non-matching inclusions and exclusions should not change that."
6712 );
6713
6714 assert_eq!(
6715 search(
6716 &project,
6717 SearchQuery::text(
6718 search_query,
6719 false,
6720 true,
6721 false,
6722 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6723 .unwrap(),
6724 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6725 .unwrap(),
6726 false,
6727 None,
6728 )
6729 .unwrap(),
6730 cx
6731 )
6732 .await
6733 .unwrap(),
6734 HashMap::from_iter([
6735 (path!("dir/one.ts").to_string(), vec![14..18]),
6736 (path!("dir/two.ts").to_string(), vec![14..18]),
6737 ]),
6738 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6739 );
6740}
6741
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identically-named files, so the inclusion filter is
    // the only thing that can distinguish between their results.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // An inclusion glob prefixed with a worktree name restricts results to that
    // worktree. Match ranges are byte offsets: "NEEDLE" occupies bytes 3..9 of
    // "// NEEDLE".
    // NOTE(review): the trailing `true` flag appears to make include/exclude
    // globs match against worktree-prefixed paths (the bare `*.ts` query below
    // passes `false`) — confirm against `SearchQuery::text`'s signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same query scoped to the other worktree must return the other file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A bare `*.ts` glob (without the worktree-prefix flag) matches files in
    // every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6840
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Per the `.gitignore`, `target` (anywhere) and the top-level
    // `node_modules` are ignored; only the root `package.json` is tracked.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // With the third boolean (include-ignored) off, files under ignored
    // directories are skipped entirely; match ranges are byte offsets.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each query below.
    // NOTE(review): presumably to start each search from a clean worktree scan
    // state — confirm whether reusing the first project would also pass.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Turning include-ignored on must surface matches inside `target` and
    // `node_modules` as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude filters still apply to ignored files: restrict to the
    // prettier directory, then drop its TypeScript file via the exclusion.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6965
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Cyrillic text: each letter is 2 bytes in UTF-8, so "привет" spans 12
    // bytes — all match ranges below are byte offsets, not char offsets.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive Unicode search stays a plain text query...
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // ...but a case-insensitive Unicode search is compiled down to a regex
    // (simple ASCII case folding is not sufficient for Cyrillic).
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing dot must be treated as a literal character, not a regex
    // wildcard: only two.rs actually contains "привет." (case-insensitively).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7048
7049#[gpui::test]
7050async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7051 init_test(cx);
7052
7053 let fs = FakeFs::new(cx.executor());
7054 fs.insert_tree(
7055 "/one/two",
7056 json!({
7057 "three": {
7058 "a.txt": "",
7059 "four": {}
7060 },
7061 "c.rs": ""
7062 }),
7063 )
7064 .await;
7065
7066 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7067 project
7068 .update(cx, |project, cx| {
7069 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7070 project.create_entry((id, rel_path("b..")), true, cx)
7071 })
7072 .await
7073 .unwrap()
7074 .into_included()
7075 .unwrap();
7076
7077 assert_eq!(
7078 fs.paths(true),
7079 vec![
7080 PathBuf::from(path!("/")),
7081 PathBuf::from(path!("/one")),
7082 PathBuf::from(path!("/one/two")),
7083 PathBuf::from(path!("/one/two/c.rs")),
7084 PathBuf::from(path!("/one/two/three")),
7085 PathBuf::from(path!("/one/two/three/a.txt")),
7086 PathBuf::from(path!("/one/two/three/b..")),
7087 PathBuf::from(path!("/one/two/three/four")),
7088 ]
7089 );
7090}
7091
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: three advertise hover support,
    // the last one does not and must never be asked for hovers.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers startup of all registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server: the first two return real hover
    // content, ESLint returns None, and the capability-less server panics if
    // it is ever queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every server with hover capabilities has actually received
    // the request before inspecting the aggregated result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned hover content contribute results;
    // sorting makes the assertion independent of response order.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7246
7247#[gpui::test]
7248async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7249 init_test(cx);
7250
7251 let fs = FakeFs::new(cx.executor());
7252 fs.insert_tree(
7253 path!("/dir"),
7254 json!({
7255 "a.ts": "a",
7256 }),
7257 )
7258 .await;
7259
7260 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7261
7262 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7263 language_registry.add(typescript_lang());
7264 let mut fake_language_servers = language_registry.register_fake_lsp(
7265 "TypeScript",
7266 FakeLspAdapter {
7267 capabilities: lsp::ServerCapabilities {
7268 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7269 ..lsp::ServerCapabilities::default()
7270 },
7271 ..FakeLspAdapter::default()
7272 },
7273 );
7274
7275 let (buffer, _handle) = project
7276 .update(cx, |p, cx| {
7277 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7278 })
7279 .await
7280 .unwrap();
7281 cx.executor().run_until_parked();
7282
7283 let fake_server = fake_language_servers
7284 .next()
7285 .await
7286 .expect("failed to get the language server");
7287
7288 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7289 move |_, _| async move {
7290 Ok(Some(lsp::Hover {
7291 contents: lsp::HoverContents::Array(vec![
7292 lsp::MarkedString::String("".to_string()),
7293 lsp::MarkedString::String(" ".to_string()),
7294 lsp::MarkedString::String("\n\n\n".to_string()),
7295 ]),
7296 range: None,
7297 }))
7298 },
7299 );
7300
7301 let hover_task = project.update(cx, |project, cx| {
7302 project.hover(&buffer, Point::new(0, 0), cx)
7303 });
7304 let () = request_handled
7305 .next()
7306 .await
7307 .expect("All hover requests should have been triggered");
7308 assert_eq!(
7309 Vec::<String>::new(),
7310 hover_task
7311 .await
7312 .into_iter()
7313 .flatten()
7314 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7315 .sorted()
7316 .collect::<Vec<_>>(),
7317 "Empty hover parts should be ignored"
7318 );
7319}
7320
7321#[gpui::test]
7322async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7323 init_test(cx);
7324
7325 let fs = FakeFs::new(cx.executor());
7326 fs.insert_tree(
7327 path!("/dir"),
7328 json!({
7329 "a.ts": "a",
7330 }),
7331 )
7332 .await;
7333
7334 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7335
7336 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7337 language_registry.add(typescript_lang());
7338 let mut fake_language_servers = language_registry.register_fake_lsp(
7339 "TypeScript",
7340 FakeLspAdapter {
7341 capabilities: lsp::ServerCapabilities {
7342 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7343 ..lsp::ServerCapabilities::default()
7344 },
7345 ..FakeLspAdapter::default()
7346 },
7347 );
7348
7349 let (buffer, _handle) = project
7350 .update(cx, |p, cx| {
7351 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7352 })
7353 .await
7354 .unwrap();
7355 cx.executor().run_until_parked();
7356
7357 let fake_server = fake_language_servers
7358 .next()
7359 .await
7360 .expect("failed to get the language server");
7361
7362 let mut request_handled = fake_server
7363 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7364 Ok(Some(vec![
7365 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7366 title: "organize imports".to_string(),
7367 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7368 ..lsp::CodeAction::default()
7369 }),
7370 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7371 title: "fix code".to_string(),
7372 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7373 ..lsp::CodeAction::default()
7374 }),
7375 ]))
7376 });
7377
7378 let code_actions_task = project.update(cx, |project, cx| {
7379 project.code_actions(
7380 &buffer,
7381 0..buffer.read(cx).len(),
7382 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7383 cx,
7384 )
7385 });
7386
7387 let () = request_handled
7388 .next()
7389 .await
7390 .expect("The code action request should have been triggered");
7391
7392 let code_actions = code_actions_task.await.unwrap().unwrap();
7393 assert_eq!(code_actions.len(), 1);
7394 assert_eq!(
7395 code_actions[0].lsp_action.action_kind(),
7396 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7397 );
7398}
7399
7400#[gpui::test]
7401async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7402 init_test(cx);
7403
7404 let fs = FakeFs::new(cx.executor());
7405 fs.insert_tree(
7406 path!("/dir"),
7407 json!({
7408 "a.tsx": "a",
7409 }),
7410 )
7411 .await;
7412
7413 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7414
7415 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7416 language_registry.add(tsx_lang());
7417 let language_server_names = [
7418 "TypeScriptServer",
7419 "TailwindServer",
7420 "ESLintServer",
7421 "NoActionsCapabilitiesServer",
7422 ];
7423
7424 let mut language_server_rxs = [
7425 language_registry.register_fake_lsp(
7426 "tsx",
7427 FakeLspAdapter {
7428 name: language_server_names[0],
7429 capabilities: lsp::ServerCapabilities {
7430 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7431 ..lsp::ServerCapabilities::default()
7432 },
7433 ..FakeLspAdapter::default()
7434 },
7435 ),
7436 language_registry.register_fake_lsp(
7437 "tsx",
7438 FakeLspAdapter {
7439 name: language_server_names[1],
7440 capabilities: lsp::ServerCapabilities {
7441 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7442 ..lsp::ServerCapabilities::default()
7443 },
7444 ..FakeLspAdapter::default()
7445 },
7446 ),
7447 language_registry.register_fake_lsp(
7448 "tsx",
7449 FakeLspAdapter {
7450 name: language_server_names[2],
7451 capabilities: lsp::ServerCapabilities {
7452 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7453 ..lsp::ServerCapabilities::default()
7454 },
7455 ..FakeLspAdapter::default()
7456 },
7457 ),
7458 language_registry.register_fake_lsp(
7459 "tsx",
7460 FakeLspAdapter {
7461 name: language_server_names[3],
7462 capabilities: lsp::ServerCapabilities {
7463 code_action_provider: None,
7464 ..lsp::ServerCapabilities::default()
7465 },
7466 ..FakeLspAdapter::default()
7467 },
7468 ),
7469 ];
7470
7471 let (buffer, _handle) = project
7472 .update(cx, |p, cx| {
7473 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7474 })
7475 .await
7476 .unwrap();
7477 cx.executor().run_until_parked();
7478
7479 let mut servers_with_actions_requests = HashMap::default();
7480 for i in 0..language_server_names.len() {
7481 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7482 panic!(
7483 "Failed to get language server #{i} with name {}",
7484 &language_server_names[i]
7485 )
7486 });
7487 let new_server_name = new_server.server.name();
7488
7489 assert!(
7490 !servers_with_actions_requests.contains_key(&new_server_name),
7491 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7492 );
7493 match new_server_name.0.as_ref() {
7494 "TailwindServer" | "TypeScriptServer" => {
7495 servers_with_actions_requests.insert(
7496 new_server_name.clone(),
7497 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7498 move |_, _| {
7499 let name = new_server_name.clone();
7500 async move {
7501 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7502 lsp::CodeAction {
7503 title: format!("{name} code action"),
7504 ..lsp::CodeAction::default()
7505 },
7506 )]))
7507 }
7508 },
7509 ),
7510 );
7511 }
7512 "ESLintServer" => {
7513 servers_with_actions_requests.insert(
7514 new_server_name,
7515 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7516 |_, _| async move { Ok(None) },
7517 ),
7518 );
7519 }
7520 "NoActionsCapabilitiesServer" => {
7521 let _never_handled = new_server
7522 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7523 panic!(
7524 "Should not call for code actions server with no corresponding capabilities"
7525 )
7526 });
7527 }
7528 unexpected => panic!("Unexpected server name: {unexpected}"),
7529 }
7530 }
7531
7532 let code_actions_task = project.update(cx, |project, cx| {
7533 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7534 });
7535
7536 // cx.run_until_parked();
7537 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7538 |mut code_actions_request| async move {
7539 code_actions_request
7540 .next()
7541 .await
7542 .expect("All code actions requests should have been triggered")
7543 },
7544 ))
7545 .await;
7546 assert_eq!(
7547 vec!["TailwindServer code action", "TypeScriptServer code action"],
7548 code_actions_task
7549 .await
7550 .unwrap()
7551 .unwrap()
7552 .into_iter()
7553 .map(|code_action| code_action.lsp_action.title().to_owned())
7554 .sorted()
7555 .collect::<Vec<_>>(),
7556 "Should receive code actions responses from all related servers with hover capabilities"
7557 );
7558}
7559
7560#[gpui::test]
7561async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7562 init_test(cx);
7563
7564 let fs = FakeFs::new(cx.executor());
7565 fs.insert_tree(
7566 "/dir",
7567 json!({
7568 "a.rs": "let a = 1;",
7569 "b.rs": "let b = 2;",
7570 "c.rs": "let c = 2;",
7571 }),
7572 )
7573 .await;
7574
7575 let project = Project::test(
7576 fs,
7577 [
7578 "/dir/a.rs".as_ref(),
7579 "/dir/b.rs".as_ref(),
7580 "/dir/c.rs".as_ref(),
7581 ],
7582 cx,
7583 )
7584 .await;
7585
7586 // check the initial state and get the worktrees
7587 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7588 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7589 assert_eq!(worktrees.len(), 3);
7590
7591 let worktree_a = worktrees[0].read(cx);
7592 let worktree_b = worktrees[1].read(cx);
7593 let worktree_c = worktrees[2].read(cx);
7594
7595 // check they start in the right order
7596 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7597 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7598 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7599
7600 (
7601 worktrees[0].clone(),
7602 worktrees[1].clone(),
7603 worktrees[2].clone(),
7604 )
7605 });
7606
7607 // move first worktree to after the second
7608 // [a, b, c] -> [b, a, c]
7609 project
7610 .update(cx, |project, cx| {
7611 let first = worktree_a.read(cx);
7612 let second = worktree_b.read(cx);
7613 project.move_worktree(first.id(), second.id(), cx)
7614 })
7615 .expect("moving first after second");
7616
7617 // check the state after moving
7618 project.update(cx, |project, cx| {
7619 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7620 assert_eq!(worktrees.len(), 3);
7621
7622 let first = worktrees[0].read(cx);
7623 let second = worktrees[1].read(cx);
7624 let third = worktrees[2].read(cx);
7625
7626 // check they are now in the right order
7627 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7628 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7629 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7630 });
7631
7632 // move the second worktree to before the first
7633 // [b, a, c] -> [a, b, c]
7634 project
7635 .update(cx, |project, cx| {
7636 let second = worktree_a.read(cx);
7637 let first = worktree_b.read(cx);
7638 project.move_worktree(first.id(), second.id(), cx)
7639 })
7640 .expect("moving second before first");
7641
7642 // check the state after moving
7643 project.update(cx, |project, cx| {
7644 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7645 assert_eq!(worktrees.len(), 3);
7646
7647 let first = worktrees[0].read(cx);
7648 let second = worktrees[1].read(cx);
7649 let third = worktrees[2].read(cx);
7650
7651 // check they are now in the right order
7652 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7653 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7654 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7655 });
7656
7657 // move the second worktree to after the third
7658 // [a, b, c] -> [a, c, b]
7659 project
7660 .update(cx, |project, cx| {
7661 let second = worktree_b.read(cx);
7662 let third = worktree_c.read(cx);
7663 project.move_worktree(second.id(), third.id(), cx)
7664 })
7665 .expect("moving second after third");
7666
7667 // check the state after moving
7668 project.update(cx, |project, cx| {
7669 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7670 assert_eq!(worktrees.len(), 3);
7671
7672 let first = worktrees[0].read(cx);
7673 let second = worktrees[1].read(cx);
7674 let third = worktrees[2].read(cx);
7675
7676 // check they are now in the right order
7677 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7678 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7679 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7680 });
7681
7682 // move the third worktree to before the second
7683 // [a, c, b] -> [a, b, c]
7684 project
7685 .update(cx, |project, cx| {
7686 let third = worktree_c.read(cx);
7687 let second = worktree_b.read(cx);
7688 project.move_worktree(third.id(), second.id(), cx)
7689 })
7690 .expect("moving third before second");
7691
7692 // check the state after moving
7693 project.update(cx, |project, cx| {
7694 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7695 assert_eq!(worktrees.len(), 3);
7696
7697 let first = worktrees[0].read(cx);
7698 let second = worktrees[1].read(cx);
7699 let third = worktrees[2].read(cx);
7700
7701 // check they are now in the right order
7702 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7703 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7704 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7705 });
7706
7707 // move the first worktree to after the third
7708 // [a, b, c] -> [b, c, a]
7709 project
7710 .update(cx, |project, cx| {
7711 let first = worktree_a.read(cx);
7712 let third = worktree_c.read(cx);
7713 project.move_worktree(first.id(), third.id(), cx)
7714 })
7715 .expect("moving first after third");
7716
7717 // check the state after moving
7718 project.update(cx, |project, cx| {
7719 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7720 assert_eq!(worktrees.len(), 3);
7721
7722 let first = worktrees[0].read(cx);
7723 let second = worktrees[1].read(cx);
7724 let third = worktrees[2].read(cx);
7725
7726 // check they are now in the right order
7727 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7728 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7729 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7730 });
7731
7732 // move the third worktree to before the first
7733 // [b, c, a] -> [a, b, c]
7734 project
7735 .update(cx, |project, cx| {
7736 let third = worktree_a.read(cx);
7737 let first = worktree_b.read(cx);
7738 project.move_worktree(third.id(), first.id(), cx)
7739 })
7740 .expect("moving third before first");
7741
7742 // check the state after moving
7743 project.update(cx, |project, cx| {
7744 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7745 assert_eq!(worktrees.len(), 3);
7746
7747 let first = worktrees[0].read(cx);
7748 let second = worktrees[1].read(cx);
7749 let third = worktrees[2].read(cx);
7750
7751 // check they are now in the right order
7752 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7753 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7754 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7755 });
7756}
7757
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index (staged) version lacks the comment and says "hello"; the
    // working-copy version adds the comment and says "goodbye".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // The unstaged diff compares the buffer against the index version.
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Against the original index: one added comment line and one modified
    // println line. Hunk ranges are buffer row ranges.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Re-stage a version that already contains the comment but not the
    // println; the diff must recompute against the new index contents.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // Now only the println line is unstaged, reported as a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7851
/// Verifies the uncommitted diff for a buffer: hunk contents are computed
/// against HEAD, while each hunk's secondary status reflects whether the
/// change is also present in the index. Covers a modified file, a HEAD reset
/// underneath an open buffer, and a file deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, the index, and the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD content) picks up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (it has a secondary hunk in the
    // index diff), while the modified println line is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as one deletion hunk; the content is still in
    // the index, so the hunk has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8035
/// Exercises staging of individual hunks through an uncommitted diff:
/// optimistic "pending" secondary statuses, the events emitted while an index
/// write is in flight, rollback of the optimistic state when the index write
/// fails, and two staging operations issued back-to-back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD deletes "zero" and modifies two lines relative to the working copy.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is fully unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so their ordering can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Pending: the write to the index hasn't completed yet.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8385
/// Like `test_staging_hunks`, but pauses the FakeFs event stream so that new
/// hunks are staged while the FS events for earlier index writes are still
/// buffered, verifying that the delayed events don't clobber pending state.
/// The seeds pin executor schedules that previously exposed this race.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8579
/// Randomized test: repeatedly stages or unstages a random hunk, tracking the
/// expected pending secondary status locally, with random executor yields in
/// between. After everything settles, each hunk's final status must match the
/// locally-tracked expectation.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via the
    // `OPERATIONS` environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of the expected state.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged state and record the expected
        // optimistic "pending" status in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let some IO race with subsequent operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending statuses resolve to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8699
8700#[gpui::test]
8701async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8702 init_test(cx);
8703
8704 let committed_contents = r#"
8705 fn main() {
8706 println!("hello from HEAD");
8707 }
8708 "#
8709 .unindent();
8710 let file_contents = r#"
8711 fn main() {
8712 println!("hello from the working copy");
8713 }
8714 "#
8715 .unindent();
8716
8717 let fs = FakeFs::new(cx.background_executor.clone());
8718 fs.insert_tree(
8719 "/dir",
8720 json!({
8721 ".git": {},
8722 "src": {
8723 "main.rs": file_contents,
8724 }
8725 }),
8726 )
8727 .await;
8728
8729 fs.set_head_for_repo(
8730 Path::new("/dir/.git"),
8731 &[("src/main.rs", committed_contents.clone())],
8732 "deadbeef",
8733 );
8734 fs.set_index_for_repo(
8735 Path::new("/dir/.git"),
8736 &[("src/main.rs", committed_contents.clone())],
8737 );
8738
8739 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8740
8741 let buffer = project
8742 .update(cx, |project, cx| {
8743 project.open_local_buffer("/dir/src/main.rs", cx)
8744 })
8745 .await
8746 .unwrap();
8747 let uncommitted_diff = project
8748 .update(cx, |project, cx| {
8749 project.open_uncommitted_diff(buffer.clone(), cx)
8750 })
8751 .await
8752 .unwrap();
8753
8754 cx.run_until_parked();
8755 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8756 let snapshot = buffer.read(cx).snapshot();
8757 assert_hunks(
8758 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8759 &snapshot,
8760 &uncommitted_diff.base_text_string(cx).unwrap(),
8761 &[(
8762 1..2,
8763 " println!(\"hello from HEAD\");\n",
8764 " println!(\"hello from the working copy\");\n",
8765 DiffHunkStatus {
8766 kind: DiffHunkStatusKind::Modified,
8767 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8768 },
8769 )],
8770 );
8771 });
8772}
8773
// TODO: Should we test this on Windows also?
/// Regression test: staging a hunk must preserve the file's executable bit in
/// the git index (no `100755` -> `100644` mode change). Uses a real git repo
/// on disk and shells out to `git` to inspect the staged state.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real-FS test: blocking IO happens off the test executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it on disk.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8858
/// Checks that `repository_and_path_for_project_path` maps each project path
/// to its innermost containing repository (including a nested dependency
/// repo), returns `None` for paths outside any repository, and stops
/// resolving once a repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repo containing a nested repo at `dir1/deps/dep1`;
    // `c.txt` lives outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo workdir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting `.git` should make paths under `dir1` resolve to no repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8948
8949#[gpui::test]
8950async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8951 init_test(cx);
8952 let fs = FakeFs::new(cx.background_executor.clone());
8953 let home = paths::home_dir();
8954 fs.insert_tree(
8955 home,
8956 json!({
8957 ".git": {},
8958 "project": {
8959 "a.txt": "A"
8960 },
8961 }),
8962 )
8963 .await;
8964
8965 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8966 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8967 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8968
8969 project
8970 .update(cx, |project, cx| project.git_scans_complete(cx))
8971 .await;
8972 tree.flush_fs_events(cx).await;
8973
8974 project.read_with(cx, |project, cx| {
8975 let containing = project
8976 .git_store()
8977 .read(cx)
8978 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8979 assert!(containing.is_none());
8980 });
8981
8982 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8983 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8984 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8985 project
8986 .update(cx, |project, cx| project.git_scans_complete(cx))
8987 .await;
8988 tree.flush_fs_events(cx).await;
8989
8990 project.read_with(cx, |project, cx| {
8991 let containing = project
8992 .git_store()
8993 .read(cx)
8994 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8995 assert_eq!(
8996 containing
8997 .unwrap()
8998 .0
8999 .read(cx)
9000 .work_directory_abs_path
9001 .as_ref(),
9002 home,
9003 );
9004 });
9005}
9006
/// End-to-end check of `Repository::cached_status` against a real git
/// repository on disk, across file modification, commit, and deletion of
/// both tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: blocking IO happens off the test executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",    // Modified
            "b.txt": "bb",   // Added
            "c.txt": "ccc",  // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all pending changes, then delete one tracked and one untracked
    // file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9136
/// Checks postprocessing of raw git statuses: a nested repository's work
/// directory is excluded from the outer repo's status list, and an entry
/// that is deleted in the index but present on disk is reported with the
/// combined index/worktree codes (`DA`). Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: blocking IO happens off the test executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // There are two repositories (outer and `sub`); select the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9200
9201#[track_caller]
9202/// We merge lhs into rhs.
9203fn merge_pending_ops_snapshots(
9204 source: Vec<pending_op::PendingOps>,
9205 mut target: Vec<pending_op::PendingOps>,
9206) -> Vec<pending_op::PendingOps> {
9207 for s_ops in source {
9208 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9209 if ops.repo_path == s_ops.repo_path {
9210 Some(idx)
9211 } else {
9212 None
9213 }
9214 }) {
9215 let t_ops = &mut target[idx];
9216 for s_op in s_ops.ops {
9217 if let Some(op_idx) = t_ops
9218 .ops
9219 .iter()
9220 .zip(0..)
9221 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9222 {
9223 let t_op = &mut t_ops.ops[op_idx];
9224 match (s_op.job_status, t_op.job_status) {
9225 (pending_op::JobStatus::Running, _) => {}
9226 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9227 (s_st, t_st) if s_st == t_st => {}
9228 _ => unreachable!(),
9229 }
9230 } else {
9231 t_ops.ops.push(s_op);
9232 }
9233 }
9234 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9235 } else {
9236 target.push(s_ops);
9237 }
9238 }
9239 target
9240}
9241
// Verifies that alternating stage/unstage requests each produce a pending op
// that transitions Running -> Finished, and that all five ops are observable
// through the PendingOpsChanged event stream.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot emitted by the git store, merged
    // so that later (more final) job statuses win.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; ids are assigned sequentially.
    let mut id = 1u16;

    // Issues a stage or unstage request for `path`, asserting the op is
    // Running while the task is in flight and Finished once it completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // All five ops should have been observed through events, in id order,
    // each having reached Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last op staged the file, so its cached status is index-Added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9402
// Verifies that when the same path is staged twice in quick succession, the
// first (superseded) op is marked Skipped and only the second one Finishes.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot emitted by the git store, merged
    // so that later (more final) job statuses win.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First staging request: detached, so it may still be running when the
    // second request arrives and supersedes it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second staging request for the same path; await it (with a timeout as a
    // safety net against hangs).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 did the actual staging (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged regardless of which op performed the work.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9508
// Verifies that stage_all/unstage_all produce per-path pending ops for every
// affected file, and that a path already staged individually is folded into
// the bulk operations' op ids.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot emitted by the git store, merged
    // so that later (more final) job statuses win.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 is the individual stage; stage_all found it already staged,
    // so its next op (id 2) is the unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 is from stage_all, op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all both files are back to Untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9637
9638#[gpui::test]
9639async fn test_repository_subfolder_git_status(
9640 executor: gpui::BackgroundExecutor,
9641 cx: &mut gpui::TestAppContext,
9642) {
9643 init_test(cx);
9644
9645 let fs = FakeFs::new(executor);
9646 fs.insert_tree(
9647 path!("/root"),
9648 json!({
9649 "my-repo": {
9650 ".git": {},
9651 "a.txt": "a",
9652 "sub-folder-1": {
9653 "sub-folder-2": {
9654 "c.txt": "cc",
9655 "d": {
9656 "e.txt": "eee"
9657 }
9658 },
9659 }
9660 },
9661 }),
9662 )
9663 .await;
9664
9665 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
9666 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
9667
9668 fs.set_status_for_repo(
9669 path!("/root/my-repo/.git").as_ref(),
9670 &[(E_TXT, FileStatus::Untracked)],
9671 );
9672
9673 let project = Project::test(
9674 fs.clone(),
9675 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
9676 cx,
9677 )
9678 .await;
9679
9680 project
9681 .update(cx, |project, cx| project.git_scans_complete(cx))
9682 .await;
9683 cx.run_until_parked();
9684
9685 let repository = project.read_with(cx, |project, cx| {
9686 project.repositories(cx).values().next().unwrap().clone()
9687 });
9688
9689 // Ensure that the git status is loaded correctly
9690 repository.read_with(cx, |repository, _cx| {
9691 assert_eq!(
9692 repository.work_directory_abs_path,
9693 Path::new(path!("/root/my-repo")).into()
9694 );
9695
9696 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9697 assert_eq!(
9698 repository
9699 .status_for_path(&repo_path(E_TXT))
9700 .unwrap()
9701 .status,
9702 FileStatus::Untracked
9703 );
9704 });
9705
9706 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
9707 project
9708 .update(cx, |project, cx| project.git_scans_complete(cx))
9709 .await;
9710 cx.run_until_parked();
9711
9712 repository.read_with(cx, |repository, _cx| {
9713 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9714 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
9715 });
9716}
9717
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` compiles this test out entirely until the flakiness is fixed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; allow the executor to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick the
    // other branch's commit onto main to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git left the repo in a cherry-pick-in-progress state
    // with a.txt conflicted.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9800
9801#[gpui::test]
9802async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
9803 init_test(cx);
9804 let fs = FakeFs::new(cx.background_executor.clone());
9805 fs.insert_tree(
9806 path!("/root"),
9807 json!({
9808 ".git": {},
9809 ".gitignore": "*.txt\n",
9810 "a.xml": "<a></a>",
9811 "b.txt": "Some text"
9812 }),
9813 )
9814 .await;
9815
9816 fs.set_head_and_index_for_repo(
9817 path!("/root/.git").as_ref(),
9818 &[
9819 (".gitignore", "*.txt\n".into()),
9820 ("a.xml", "<a></a>".into()),
9821 ],
9822 );
9823
9824 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9825
9826 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9827 tree.flush_fs_events(cx).await;
9828 project
9829 .update(cx, |project, cx| project.git_scans_complete(cx))
9830 .await;
9831 cx.executor().run_until_parked();
9832
9833 let repository = project.read_with(cx, |project, cx| {
9834 project.repositories(cx).values().next().unwrap().clone()
9835 });
9836
9837 // One file is unmodified, the other is ignored.
9838 cx.read(|cx| {
9839 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
9840 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
9841 });
9842
9843 // Change the gitignore, and stage the newly non-ignored file.
9844 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
9845 .await
9846 .unwrap();
9847 fs.set_index_for_repo(
9848 Path::new(path!("/root/.git")),
9849 &[
9850 (".gitignore", "*.txt\n".into()),
9851 ("a.xml", "<a></a>".into()),
9852 ("b.txt", "Some text".into()),
9853 ],
9854 );
9855
9856 cx.executor().run_until_parked();
9857 cx.read(|cx| {
9858 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
9859 assert_entry_git_state(
9860 tree.read(cx),
9861 repository.read(cx),
9862 "b.txt",
9863 Some(StatusCode::Added),
9864 false,
9865 );
9866 });
9867}
9868
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Verifies that renaming a repository's work directory preserves both the
// tracked repository handle and its per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; allow the executor to block.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Pre-rename: statuses are visible under the original work directory.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Post-rename: same repository entity, updated work directory path, and
    // the same statuses carried over.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9950
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// End-to-end check that per-file git statuses track a sequence of working-copy
// edits, commits, resets, stashes, ignore-rule changes, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; allow the executor to block.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start Untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt were just committed, so they have no status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed, so it is clean again; b.txt was
        // removed from the index, so it is Untracked; e.txt is Modified.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new file inside a nested directory; it should appear Untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the file's status should follow the move.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10175
10176#[gpui::test]
10177#[ignore]
10178async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
10179 init_test(cx);
10180 cx.executor().allow_parking();
10181
10182 const IGNORE_RULE: &str = "**/target";
10183
10184 let root = TempTree::new(json!({
10185 "project": {
10186 "src": {
10187 "main.rs": "fn main() {}"
10188 },
10189 "target": {
10190 "debug": {
10191 "important_text.txt": "important text",
10192 },
10193 },
10194 ".gitignore": IGNORE_RULE
10195 },
10196
10197 }));
10198 let root_path = root.path();
10199
10200 // Set up git repository before creating the worktree.
10201 let work_dir = root.path().join("project");
10202 let repo = git_init(work_dir.as_path());
10203 repo.add_ignore_rule(IGNORE_RULE).unwrap();
10204 git_add("src/main.rs", &repo);
10205 git_add(".gitignore", &repo);
10206 git_commit("Initial commit", &repo);
10207
10208 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10209 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10210 let project_events = Arc::new(Mutex::new(Vec::new()));
10211 project.update(cx, |project, cx| {
10212 let repo_events = repository_updates.clone();
10213 cx.subscribe(project.git_store(), move |_, _, e, _| {
10214 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10215 repo_events.lock().push(e.clone());
10216 }
10217 })
10218 .detach();
10219 let project_events = project_events.clone();
10220 cx.subscribe_self(move |_, e, _| {
10221 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10222 project_events.lock().extend(
10223 updates
10224 .iter()
10225 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10226 .filter(|(path, _)| path != "fs-event-sentinel"),
10227 );
10228 }
10229 })
10230 .detach();
10231 });
10232
10233 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10234 tree.flush_fs_events(cx).await;
10235 tree.update(cx, |tree, cx| {
10236 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
10237 })
10238 .await
10239 .unwrap();
10240 tree.update(cx, |tree, _| {
10241 assert_eq!(
10242 tree.entries(true, 0)
10243 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10244 .collect::<Vec<_>>(),
10245 vec![
10246 (rel_path(""), false),
10247 (rel_path("project/"), false),
10248 (rel_path("project/.gitignore"), false),
10249 (rel_path("project/src"), false),
10250 (rel_path("project/src/main.rs"), false),
10251 (rel_path("project/target"), true),
10252 (rel_path("project/target/debug"), true),
10253 (rel_path("project/target/debug/important_text.txt"), true),
10254 ]
10255 );
10256 });
10257
10258 assert_eq!(
10259 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10260 vec![
10261 RepositoryEvent::StatusesChanged,
10262 RepositoryEvent::MergeHeadsChanged,
10263 ],
10264 "Initial worktree scan should produce a repo update event"
10265 );
10266 assert_eq!(
10267 project_events.lock().drain(..).collect::<Vec<_>>(),
10268 vec![
10269 ("project/target".to_string(), PathChange::Loaded),
10270 ("project/target/debug".to_string(), PathChange::Loaded),
10271 (
10272 "project/target/debug/important_text.txt".to_string(),
10273 PathChange::Loaded
10274 ),
10275 ],
10276 "Initial project changes should show that all not-ignored and all opened files are loaded"
10277 );
10278
10279 let deps_dir = work_dir.join("target").join("debug").join("deps");
10280 std::fs::create_dir_all(&deps_dir).unwrap();
10281 tree.flush_fs_events(cx).await;
10282 project
10283 .update(cx, |project, cx| project.git_scans_complete(cx))
10284 .await;
10285 cx.executor().run_until_parked();
10286 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
10287 tree.flush_fs_events(cx).await;
10288 project
10289 .update(cx, |project, cx| project.git_scans_complete(cx))
10290 .await;
10291 cx.executor().run_until_parked();
10292 std::fs::remove_dir_all(&deps_dir).unwrap();
10293 tree.flush_fs_events(cx).await;
10294 project
10295 .update(cx, |project, cx| project.git_scans_complete(cx))
10296 .await;
10297 cx.executor().run_until_parked();
10298
10299 tree.update(cx, |tree, _| {
10300 assert_eq!(
10301 tree.entries(true, 0)
10302 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10303 .collect::<Vec<_>>(),
10304 vec![
10305 (rel_path(""), false),
10306 (rel_path("project/"), false),
10307 (rel_path("project/.gitignore"), false),
10308 (rel_path("project/src"), false),
10309 (rel_path("project/src/main.rs"), false),
10310 (rel_path("project/target"), true),
10311 (rel_path("project/target/debug"), true),
10312 (rel_path("project/target/debug/important_text.txt"), true),
10313 ],
10314 "No stray temp files should be left after the flycheck changes"
10315 );
10316 });
10317
10318 assert_eq!(
10319 repository_updates
10320 .lock()
10321 .iter()
10322 .cloned()
10323 .collect::<Vec<_>>(),
10324 Vec::new(),
10325 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
10326 );
10327 assert_eq!(
10328 project_events.lock().as_slice(),
10329 vec![
10330 ("project/target/debug/deps".to_string(), PathChange::Added),
10331 ("project/target/debug/deps".to_string(), PathChange::Removed),
10332 ],
10333 "Due to `debug` directory being tracked, it should get updates for entries inside it.
10334 No updates for more nested directories should happen as those are ignored",
10335 );
10336}
10337
10338// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10339// to different timings/ordering of events.
10340#[ignore]
10341#[gpui::test]
10342async fn test_odd_events_for_ignored_dirs(
10343 executor: BackgroundExecutor,
10344 cx: &mut gpui::TestAppContext,
10345) {
10346 init_test(cx);
10347 let fs = FakeFs::new(executor);
10348 fs.insert_tree(
10349 path!("/root"),
10350 json!({
10351 ".git": {},
10352 ".gitignore": "**/target/",
10353 "src": {
10354 "main.rs": "fn main() {}",
10355 },
10356 "target": {
10357 "debug": {
10358 "foo.txt": "foo",
10359 "deps": {}
10360 }
10361 }
10362 }),
10363 )
10364 .await;
10365 fs.set_head_and_index_for_repo(
10366 path!("/root/.git").as_ref(),
10367 &[
10368 (".gitignore", "**/target/".into()),
10369 ("src/main.rs", "fn main() {}".into()),
10370 ],
10371 );
10372
10373 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10374 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10375 let project_events = Arc::new(Mutex::new(Vec::new()));
10376 project.update(cx, |project, cx| {
10377 let repository_updates = repository_updates.clone();
10378 cx.subscribe(project.git_store(), move |_, _, e, _| {
10379 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10380 repository_updates.lock().push(e.clone());
10381 }
10382 })
10383 .detach();
10384 let project_events = project_events.clone();
10385 cx.subscribe_self(move |_, e, _| {
10386 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10387 project_events.lock().extend(
10388 updates
10389 .iter()
10390 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10391 .filter(|(path, _)| path != "fs-event-sentinel"),
10392 );
10393 }
10394 })
10395 .detach();
10396 });
10397
10398 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10399 tree.update(cx, |tree, cx| {
10400 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10401 })
10402 .await
10403 .unwrap();
10404 tree.flush_fs_events(cx).await;
10405 project
10406 .update(cx, |project, cx| project.git_scans_complete(cx))
10407 .await;
10408 cx.run_until_parked();
10409 tree.update(cx, |tree, _| {
10410 assert_eq!(
10411 tree.entries(true, 0)
10412 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10413 .collect::<Vec<_>>(),
10414 vec![
10415 (rel_path(""), false),
10416 (rel_path(".gitignore"), false),
10417 (rel_path("src"), false),
10418 (rel_path("src/main.rs"), false),
10419 (rel_path("target"), true),
10420 (rel_path("target/debug"), true),
10421 (rel_path("target/debug/deps"), true),
10422 (rel_path("target/debug/foo.txt"), true),
10423 ]
10424 );
10425 });
10426
10427 assert_eq!(
10428 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10429 vec![
10430 RepositoryEvent::MergeHeadsChanged,
10431 RepositoryEvent::BranchChanged,
10432 RepositoryEvent::StatusesChanged,
10433 RepositoryEvent::StatusesChanged,
10434 ],
10435 "Initial worktree scan should produce a repo update event"
10436 );
10437 assert_eq!(
10438 project_events.lock().drain(..).collect::<Vec<_>>(),
10439 vec![
10440 ("target".to_string(), PathChange::Loaded),
10441 ("target/debug".to_string(), PathChange::Loaded),
10442 ("target/debug/deps".to_string(), PathChange::Loaded),
10443 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10444 ],
10445 "All non-ignored entries and all opened firs should be getting a project event",
10446 );
10447
10448 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10449 // This may happen multiple times during a single flycheck, but once is enough for testing.
10450 fs.emit_fs_event("/root/target/debug/deps", None);
10451 tree.flush_fs_events(cx).await;
10452 project
10453 .update(cx, |project, cx| project.git_scans_complete(cx))
10454 .await;
10455 cx.executor().run_until_parked();
10456
10457 assert_eq!(
10458 repository_updates
10459 .lock()
10460 .iter()
10461 .cloned()
10462 .collect::<Vec<_>>(),
10463 Vec::new(),
10464 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10465 );
10466 assert_eq!(
10467 project_events.lock().as_slice(),
10468 Vec::new(),
10469 "No further project events should happen, as only ignored dirs received FS events",
10470 );
10471}
10472
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Repositories should only be registered for visible worktrees: adding an
    // invisible (single-file) worktree that lives inside another repo must not
    // surface that repo in `Project::repositories`.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // The project's only visible worktree is the nested repo `dep1`.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file that belongs to the outer `dir1` repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo must still not be listed; only the visible worktree's repo is.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10534
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies git status and ignore state after a rescan: tracked files report
    // their index status, while files ignored by this repo's or an ancestor's
    // .gitignore have no status and are flagged `is_ignored`.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Disable scan exclusions so ignored dirs are still observable.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored dirs aren't scanned eagerly; force-load `ignored-dir`'s entries.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked and unmodified: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by this repo's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage one of them.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly-staged file shows up as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10675
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file pointing into
    // `.git/worktrees/...`) and submodules (`.git` file pointing into
    // `.git/modules/...`) are each discovered as distinct repositories, and
    // that git state changes in them are picked up and reflected in statuses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index contain "b" while the file on disk contains "B",
            // so src/b.txt becomes modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer must be associated with the linked worktree's repo,
        // not the outer project repo.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures pending repo work is flushed before asserting status.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10831
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two worktrees rooted in different subdirectories of the same git repo
    // must resolve to a single deduplicated repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository (the shared parent) should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10878
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // Saving a buffer under a new path should re-key its unstaged and
    // uncommitted diffs against the new path's staged/committed contents
    // (via the `BufferChangedFilePath` event).
    init_test(cx);

    // Distinct marker strings so each diff base can be identified in asserts.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so saving it elsewhere produces a real diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // The uncommitted diff should likewise use file_2's committed contents.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10992
10993async fn search(
10994 project: &Entity<Project>,
10995 query: SearchQuery,
10996 cx: &mut gpui::TestAppContext,
10997) -> Result<HashMap<String, Vec<Range<usize>>>> {
10998 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10999 let mut results = HashMap::default();
11000 while let Ok(search_result) = search_rx.rx.recv().await {
11001 match search_result {
11002 SearchResult::Buffer { buffer, ranges } => {
11003 results.entry(buffer).or_insert(ranges);
11004 }
11005 SearchResult::LimitReached => {}
11006 }
11007 }
11008 Ok(results
11009 .into_iter()
11010 .map(|(buffer, ranges)| {
11011 buffer.update(cx, |buffer, cx| {
11012 let path = buffer
11013 .file()
11014 .unwrap()
11015 .full_path(cx)
11016 .to_string_lossy()
11017 .to_string();
11018 let ranges = ranges
11019 .into_iter()
11020 .map(|range| range.to_offset(buffer))
11021 .collect::<Vec<_>>();
11022 (path, ranges)
11023 })
11024 })
11025 .collect())
11026}
11027
11028pub fn init_test(cx: &mut gpui::TestAppContext) {
11029 zlog::init_test();
11030
11031 cx.update(|cx| {
11032 let settings_store = SettingsStore::test(cx);
11033 cx.set_global(settings_store);
11034 release_channel::init(semver::Version::new(0, 0, 0), cx);
11035 });
11036}
11037
11038fn json_lang() -> Arc<Language> {
11039 Arc::new(Language::new(
11040 LanguageConfig {
11041 name: "JSON".into(),
11042 matcher: LanguageMatcher {
11043 path_suffixes: vec!["json".to_string()],
11044 ..Default::default()
11045 },
11046 ..Default::default()
11047 },
11048 None,
11049 ))
11050}
11051
11052fn js_lang() -> Arc<Language> {
11053 Arc::new(Language::new(
11054 LanguageConfig {
11055 name: "JavaScript".into(),
11056 matcher: LanguageMatcher {
11057 path_suffixes: vec!["js".to_string()],
11058 ..Default::default()
11059 },
11060 ..Default::default()
11061 },
11062 None,
11063 ))
11064}
11065
11066fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
11067 struct PythonMootToolchainLister(Arc<FakeFs>);
11068 #[async_trait]
11069 impl ToolchainLister for PythonMootToolchainLister {
11070 async fn list(
11071 &self,
11072 worktree_root: PathBuf,
11073 subroot_relative_path: Arc<RelPath>,
11074 _: Option<HashMap<String, String>>,
11075 _: &dyn Fs,
11076 ) -> ToolchainList {
11077 // This lister will always return a path .venv directories within ancestors
11078 let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
11079 let mut toolchains = vec![];
11080 for ancestor in ancestors {
11081 let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
11082 if self.0.is_dir(&venv_path).await {
11083 toolchains.push(Toolchain {
11084 name: SharedString::new("Python Venv"),
11085 path: venv_path.to_string_lossy().into_owned().into(),
11086 language_name: LanguageName(SharedString::new_static("Python")),
11087 as_json: serde_json::Value::Null,
11088 })
11089 }
11090 }
11091 ToolchainList {
11092 toolchains,
11093 ..Default::default()
11094 }
11095 }
11096 async fn resolve(
11097 &self,
11098 _: PathBuf,
11099 _: Option<HashMap<String, String>>,
11100 _: &dyn Fs,
11101 ) -> anyhow::Result<Toolchain> {
11102 Err(anyhow::anyhow!("Not implemented"))
11103 }
11104 fn meta(&self) -> ToolchainMetadata {
11105 ToolchainMetadata {
11106 term: SharedString::new_static("Virtual Environment"),
11107 new_toolchain_placeholder: SharedString::new_static(
11108 "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
11109 ),
11110 manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
11111 }
11112 }
11113 fn activation_script(
11114 &self,
11115 _: &Toolchain,
11116 _: ShellKind,
11117 _: &gpui::App,
11118 ) -> futures::future::BoxFuture<'static, Vec<String>> {
11119 Box::pin(async { vec![] })
11120 }
11121 }
11122 Arc::new(
11123 Language::new(
11124 LanguageConfig {
11125 name: "Python".into(),
11126 matcher: LanguageMatcher {
11127 path_suffixes: vec!["py".to_string()],
11128 ..Default::default()
11129 },
11130 ..Default::default()
11131 },
11132 None, // We're not testing Python parsing with this language.
11133 )
11134 .with_manifest(Some(ManifestName::from(SharedString::new_static(
11135 "pyproject.toml",
11136 ))))
11137 .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
11138 )
11139}
11140
11141fn typescript_lang() -> Arc<Language> {
11142 Arc::new(Language::new(
11143 LanguageConfig {
11144 name: "TypeScript".into(),
11145 matcher: LanguageMatcher {
11146 path_suffixes: vec!["ts".to_string()],
11147 ..Default::default()
11148 },
11149 ..Default::default()
11150 },
11151 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11152 ))
11153}
11154
11155fn tsx_lang() -> Arc<Language> {
11156 Arc::new(Language::new(
11157 LanguageConfig {
11158 name: "tsx".into(),
11159 matcher: LanguageMatcher {
11160 path_suffixes: vec!["tsx".to_string()],
11161 ..Default::default()
11162 },
11163 ..Default::default()
11164 },
11165 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11166 ))
11167}
11168
11169fn get_all_tasks(
11170 project: &Entity<Project>,
11171 task_contexts: Arc<TaskContexts>,
11172 cx: &mut App,
11173) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11174 let new_tasks = project.update(cx, |project, cx| {
11175 project.task_store.update(cx, |task_store, cx| {
11176 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11177 this.used_and_current_resolved_tasks(task_contexts, cx)
11178 })
11179 })
11180 });
11181
11182 cx.background_spawn(async move {
11183 let (mut old, new) = new_tasks.await;
11184 old.extend(new);
11185 old
11186 })
11187}
11188
11189#[track_caller]
11190fn assert_entry_git_state(
11191 tree: &Worktree,
11192 repository: &Repository,
11193 path: &str,
11194 index_status: Option<StatusCode>,
11195 is_ignored: bool,
11196) {
11197 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11198 let entry = tree
11199 .entry_for_path(&rel_path(path))
11200 .unwrap_or_else(|| panic!("entry {path} not found"));
11201 let status = repository
11202 .status_for_path(&repo_path(path))
11203 .map(|entry| entry.status);
11204 let expected = index_status.map(|index_status| {
11205 TrackedStatus {
11206 index_status,
11207 worktree_status: StatusCode::Unmodified,
11208 }
11209 .into()
11210 });
11211 assert_eq!(
11212 status, expected,
11213 "expected {path} to have git status: {expected:?}"
11214 );
11215 assert_eq!(
11216 entry.is_ignored, is_ignored,
11217 "expected {path} to have is_ignored: {is_ignored}"
11218 );
11219}
11220
11221#[track_caller]
11222fn git_init(path: &Path) -> git2::Repository {
11223 let mut init_opts = RepositoryInitOptions::new();
11224 init_opts.initial_head("main");
11225 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11226}
11227
11228#[track_caller]
11229fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11230 let path = path.as_ref();
11231 let mut index = repo.index().expect("Failed to get index");
11232 index.add_path(path).expect("Failed to add file");
11233 index.write().expect("Failed to write index");
11234}
11235
11236#[track_caller]
11237fn git_remove_index(path: &Path, repo: &git2::Repository) {
11238 let mut index = repo.index().expect("Failed to get index");
11239 index.remove_path(path).expect("Failed to add file");
11240 index.write().expect("Failed to write index");
11241}
11242
11243#[track_caller]
11244fn git_commit(msg: &'static str, repo: &git2::Repository) {
11245 use git2::Signature;
11246
11247 let signature = Signature::now("test", "test@zed.dev").unwrap();
11248 let oid = repo.index().unwrap().write_tree().unwrap();
11249 let tree = repo.find_tree(oid).unwrap();
11250 if let Ok(head) = repo.head() {
11251 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11252
11253 let parent_commit = parent_obj.as_commit().unwrap();
11254
11255 repo.commit(
11256 Some("HEAD"),
11257 &signature,
11258 &signature,
11259 msg,
11260 &tree,
11261 &[parent_commit],
11262 )
11263 .expect("Failed to commit with parent");
11264 } else {
11265 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11266 .expect("Failed to commit");
11267 }
11268}
11269
// Cherry-picks `commit` onto the current HEAD.
// NOTE: `#[cfg(any())]` compiles this out; it is kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11275
11276#[track_caller]
11277fn git_stash(repo: &mut git2::Repository) {
11278 use git2::Signature;
11279
11280 let signature = Signature::now("test", "test@zed.dev").unwrap();
11281 repo.stash_save(&signature, "N/A", None)
11282 .expect("Failed to stash");
11283}
11284
11285#[track_caller]
11286fn git_reset(offset: usize, repo: &git2::Repository) {
11287 let head = repo.head().expect("Couldn't get repo head");
11288 let object = head.peel(git2::ObjectType::Commit).unwrap();
11289 let commit = object.as_commit().unwrap();
11290 let new_head = commit
11291 .parents()
11292 .inspect(|parnet| {
11293 parnet.message();
11294 })
11295 .nth(offset)
11296 .expect("Not enough history");
11297 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11298 .expect("Could not reset");
11299}
11300
// Creates branch `name` at the current HEAD commit without switching to it.
// NOTE: `#[cfg(any())]` compiles this out; it is kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false).expect("Failed to commit");
}
11311
// Points HEAD at the ref `name` and checks out its tree.
// NOTE: `#[cfg(any())]` compiles this out; it is kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11318
// Collects the repository's status entries as a path -> status map.
// NOTE: `#[cfg(any())]` compiles this out; it is kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
11328
11329#[gpui::test]
11330async fn test_find_project_path_abs(
11331 background_executor: BackgroundExecutor,
11332 cx: &mut gpui::TestAppContext,
11333) {
11334 // find_project_path should work with absolute paths
11335 init_test(cx);
11336
11337 let fs = FakeFs::new(background_executor);
11338 fs.insert_tree(
11339 path!("/root"),
11340 json!({
11341 "project1": {
11342 "file1.txt": "content1",
11343 "subdir": {
11344 "file2.txt": "content2"
11345 }
11346 },
11347 "project2": {
11348 "file3.txt": "content3"
11349 }
11350 }),
11351 )
11352 .await;
11353
11354 let project = Project::test(
11355 fs.clone(),
11356 [
11357 path!("/root/project1").as_ref(),
11358 path!("/root/project2").as_ref(),
11359 ],
11360 cx,
11361 )
11362 .await;
11363
11364 // Make sure the worktrees are fully initialized
11365 project
11366 .update(cx, |project, cx| project.git_scans_complete(cx))
11367 .await;
11368 cx.run_until_parked();
11369
11370 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
11371 project.read_with(cx, |project, cx| {
11372 let worktrees: Vec<_> = project.worktrees(cx).collect();
11373 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
11374 let id1 = worktrees[0].read(cx).id();
11375 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
11376 let id2 = worktrees[1].read(cx).id();
11377 (abs_path1, id1, abs_path2, id2)
11378 });
11379
11380 project.update(cx, |project, cx| {
11381 let abs_path = project1_abs_path.join("file1.txt");
11382 let found_path = project.find_project_path(abs_path, cx).unwrap();
11383 assert_eq!(found_path.worktree_id, project1_id);
11384 assert_eq!(&*found_path.path, rel_path("file1.txt"));
11385
11386 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
11387 let found_path = project.find_project_path(abs_path, cx).unwrap();
11388 assert_eq!(found_path.worktree_id, project1_id);
11389 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
11390
11391 let abs_path = project2_abs_path.join("file3.txt");
11392 let found_path = project.find_project_path(abs_path, cx).unwrap();
11393 assert_eq!(found_path.worktree_id, project2_id);
11394 assert_eq!(&*found_path.path, rel_path("file3.txt"));
11395
11396 let abs_path = project1_abs_path.join("nonexistent.txt");
11397 let found_path = project.find_project_path(abs_path, cx);
11398 assert!(
11399 found_path.is_some(),
11400 "Should find project path for nonexistent file in worktree"
11401 );
11402
11403 // Test with an absolute path outside any worktree
11404 let abs_path = Path::new("/some/other/path");
11405 let found_path = project.find_project_path(abs_path, cx);
11406 assert!(
11407 found_path.is_none(),
11408 "Should not find project path for path outside any worktree"
11409 );
11410 });
11411}
11412
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies that removing worktrees drops the git repositories rooted in
    // them, and that the active repository falls back to a remaining
    // repository — or to none once all worktrees are gone.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: two repository roots (/root/a, /root/b) plus a
    // subdirectory of /root/b, so the /root/b repository is reachable from
    // two worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Both .git directories should have been discovered by the scan.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Remove the subdirectory worktree. The /root/b repository must survive
    // because the /root/b worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing /root/a should switch the active repository over to /root/b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree removed, no active repository remains.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11525
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies the optimistic-UI path for staging: while a stage operation is
    // in flight, the affected hunk should report `SecondaryHunkRemovalPending`
    // before settling on `NoSecondaryHunk` when the operation completes.
    // Timing-sensitive: drives the executor with individual ticks to observe
    // the transient state.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the working copy's
    // "TWO" line is a modified, unstaged hunk.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // Keep ticking while the hunk is still in its original state; stop as
        // soon as the optimistic pending state appears. Seeing NoSecondaryHunk
        // here would mean staging completed without ever showing the
        // optimistic state.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11670
11671#[gpui::test]
11672async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11673 init_test(cx);
11674
11675 // Configure read_only_files setting
11676 cx.update(|cx| {
11677 cx.update_global::<SettingsStore, _>(|store, cx| {
11678 store.update_user_settings(cx, |settings| {
11679 settings.project.worktree.read_only_files = Some(vec![
11680 "**/generated/**".to_string(),
11681 "**/*.gen.rs".to_string(),
11682 ]);
11683 });
11684 });
11685 });
11686
11687 let fs = FakeFs::new(cx.background_executor.clone());
11688 fs.insert_tree(
11689 path!("/root"),
11690 json!({
11691 "src": {
11692 "main.rs": "fn main() {}",
11693 "types.gen.rs": "// Generated file",
11694 },
11695 "generated": {
11696 "schema.rs": "// Auto-generated schema",
11697 }
11698 }),
11699 )
11700 .await;
11701
11702 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11703
11704 // Open a regular file - should be read-write
11705 let regular_buffer = project
11706 .update(cx, |project, cx| {
11707 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11708 })
11709 .await
11710 .unwrap();
11711
11712 regular_buffer.read_with(cx, |buffer, _| {
11713 assert!(!buffer.read_only(), "Regular file should not be read-only");
11714 });
11715
11716 // Open a file matching *.gen.rs pattern - should be read-only
11717 let gen_buffer = project
11718 .update(cx, |project, cx| {
11719 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11720 })
11721 .await
11722 .unwrap();
11723
11724 gen_buffer.read_with(cx, |buffer, _| {
11725 assert!(
11726 buffer.read_only(),
11727 "File matching *.gen.rs pattern should be read-only"
11728 );
11729 });
11730
11731 // Open a file in generated directory - should be read-only
11732 let generated_buffer = project
11733 .update(cx, |project, cx| {
11734 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11735 })
11736 .await
11737 .unwrap();
11738
11739 generated_buffer.read_with(cx, |buffer, _| {
11740 assert!(
11741 buffer.read_only(),
11742 "File in generated directory should be read-only"
11743 );
11744 });
11745}
11746
11747#[gpui::test]
11748async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11749 init_test(cx);
11750
11751 // Explicitly set read_only_files to empty (default behavior)
11752 cx.update(|cx| {
11753 cx.update_global::<SettingsStore, _>(|store, cx| {
11754 store.update_user_settings(cx, |settings| {
11755 settings.project.worktree.read_only_files = Some(vec![]);
11756 });
11757 });
11758 });
11759
11760 let fs = FakeFs::new(cx.background_executor.clone());
11761 fs.insert_tree(
11762 path!("/root"),
11763 json!({
11764 "src": {
11765 "main.rs": "fn main() {}",
11766 },
11767 "generated": {
11768 "schema.rs": "// Auto-generated schema",
11769 }
11770 }),
11771 )
11772 .await;
11773
11774 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11775
11776 // All files should be read-write when read_only_files is empty
11777 let main_buffer = project
11778 .update(cx, |project, cx| {
11779 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11780 })
11781 .await
11782 .unwrap();
11783
11784 main_buffer.read_with(cx, |buffer, _| {
11785 assert!(
11786 !buffer.read_only(),
11787 "Files should not be read-only when read_only_files is empty"
11788 );
11789 });
11790
11791 let generated_buffer = project
11792 .update(cx, |project, cx| {
11793 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11794 })
11795 .await
11796 .unwrap();
11797
11798 generated_buffer.read_with(cx, |buffer, _| {
11799 assert!(
11800 !buffer.read_only(),
11801 "Generated files should not be read-only when read_only_files is empty"
11802 );
11803 });
11804}
11805
11806#[gpui::test]
11807async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11808 init_test(cx);
11809
11810 // Configure to make lock files read-only
11811 cx.update(|cx| {
11812 cx.update_global::<SettingsStore, _>(|store, cx| {
11813 store.update_user_settings(cx, |settings| {
11814 settings.project.worktree.read_only_files = Some(vec![
11815 "**/*.lock".to_string(),
11816 "**/package-lock.json".to_string(),
11817 ]);
11818 });
11819 });
11820 });
11821
11822 let fs = FakeFs::new(cx.background_executor.clone());
11823 fs.insert_tree(
11824 path!("/root"),
11825 json!({
11826 "Cargo.lock": "# Lock file",
11827 "Cargo.toml": "[package]",
11828 "package-lock.json": "{}",
11829 "package.json": "{}",
11830 }),
11831 )
11832 .await;
11833
11834 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11835
11836 // Cargo.lock should be read-only
11837 let cargo_lock = project
11838 .update(cx, |project, cx| {
11839 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11840 })
11841 .await
11842 .unwrap();
11843
11844 cargo_lock.read_with(cx, |buffer, _| {
11845 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11846 });
11847
11848 // Cargo.toml should be read-write
11849 let cargo_toml = project
11850 .update(cx, |project, cx| {
11851 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11852 })
11853 .await
11854 .unwrap();
11855
11856 cargo_toml.read_with(cx, |buffer, _| {
11857 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11858 });
11859
11860 // package-lock.json should be read-only
11861 let package_lock = project
11862 .update(cx, |project, cx| {
11863 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11864 })
11865 .await
11866 .unwrap();
11867
11868 package_lock.read_with(cx, |buffer, _| {
11869 assert!(buffer.read_only(), "package-lock.json should be read-only");
11870 });
11871
11872 // package.json should be read-write
11873 let package_json = project
11874 .update(cx, |project, cx| {
11875 project.open_local_buffer(path!("/root/package.json"), cx)
11876 })
11877 .await
11878 .unwrap();
11879
11880 package_json.read_with(cx, |buffer, _| {
11881 assert!(!buffer.read_only(), "package.json should not be read-only");
11882 });
11883}