1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use fs::FakeFs;
29use futures::{StreamExt, future};
30use git::{
31 GitHostingProviderRegistry,
32 repository::{RepoPath, repo_path},
33 status::{FileStatus, StatusCode, TrackedStatus},
34};
35use git2::RepositoryInitOptions;
36use gpui::{
37 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
38 UpdateGlobal,
39};
40use itertools::Itertools;
41use language::{
42 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
43 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
44 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
45 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
46 language_settings::{LanguageSettingsContent, language_settings},
47 markdown_lang, rust_lang, tree_sitter_typescript,
48};
49use lsp::{
50 CodeActionKind, DiagnosticSeverity, DocumentChanges, FileOperationFilter, LanguageServerId,
51 LanguageServerName, NumberOrString, TextDocumentEdit, Uri, WillRenameFiles,
52 notification::DidRenameFiles,
53};
54use parking_lot::Mutex;
55use paths::{config_dir, global_gitignore_path, tasks_file};
56use postage::stream::Stream as _;
57use pretty_assertions::{assert_eq, assert_matches};
58use project::{
59 Event, TaskContexts,
60 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
61 search::{SearchQuery, SearchResult},
62 task_store::{TaskSettingsLocation, TaskStore},
63 *,
64};
65use rand::{Rng as _, rngs::StdRng};
66use serde_json::json;
67use settings::SettingsStore;
68#[cfg(not(windows))]
69use std::os;
70use std::{
71 env, mem,
72 num::NonZeroU32,
73 ops::Range,
74 path::{Path, PathBuf},
75 str::FromStr,
76 sync::{Arc, OnceLock},
77 task::Poll,
78 time::Duration,
79};
80use sum_tree::SumTree;
81use task::{ResolvedTask, ShellKind, TaskContext};
82use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
83use unindent::Unindent as _;
84use util::{
85 TryFutureExt as _, assert_set_eq, maybe, path,
86 paths::{PathMatcher, PathStyle},
87 rel_path::{RelPath, rel_path},
88 test::{TempTree, marked_text_offsets},
89 uri,
90};
91use worktree::WorktreeModelHandle as _;
92
93#[gpui::test]
94async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
95 cx.executor().allow_parking();
96
97 let (tx, mut rx) = futures::channel::mpsc::unbounded();
98 let _thread = std::thread::spawn(move || {
99 #[cfg(not(target_os = "windows"))]
100 std::fs::metadata("/tmp").unwrap();
101 #[cfg(target_os = "windows")]
102 std::fs::metadata("C:/Windows").unwrap();
103 std::thread::sleep(Duration::from_millis(1000));
104 tx.unbounded_send(1).unwrap();
105 });
106 rx.next().await.unwrap();
107}
108
109#[gpui::test]
110async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
111 cx.executor().allow_parking();
112
113 let io_task = smol::unblock(move || {
114 println!("sleeping on thread {:?}", std::thread::current().id());
115 std::thread::sleep(Duration::from_millis(10));
116 1
117 });
118
119 let task = cx.foreground_executor().spawn(async move {
120 io_task.await;
121 });
122
123 task.await;
124}
125
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
/// Opens a project through a symlink to the worktree root and checks that
/// entries reached via directory symlinks resolve to the same inodes.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem, which blocks.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // A symlink to the worktree root itself, plus a directory symlink
    // ("finnochio" -> "fennel") inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the root symlink rather than the real path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // NOTE(review): the count of 5 presumably includes the file reached
        // through the "finnochio" symlink — confirm against worktree semantics.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory's file resolves to the same underlying inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
176
/// Verifies `.editorconfig` support: editorconfig values override
/// `.zed/settings.json`, nested configs override the root config, `tab_width`
/// is used when `indent_size` is absent, `off` falls back to Zed settings, and
/// non-matching globs leave files unaffected.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
            root = true
            [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            max_line_length = 120
            [*.js]
            tab_width = 10
            max_line_length = off
            "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        // NOTE(review): the trailing comma after "off" below looks like
        // accidental fixture noise — confirm it is intentional.
        "b": {
            ".editorconfig": r#"
                [*.rs]
                indent_size = 2
                max_line_length = off,
                "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings propagation finish before sampling.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
275
/// Files in a worktree pick up `.editorconfig` files from ancestor directories
/// outside the worktree; the nearest config whose glob matches wins per file.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let editorconfig discovery and settings propagation finish.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
339
340#[gpui::test]
341async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
342 init_test(cx);
343
344 let fs = FakeFs::new(cx.executor());
345 fs.insert_tree(
346 path!("/parent"),
347 json!({
348 ".editorconfig": "[*]\nindent_size = 99\n",
349 "worktree": {
350 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
351 "file.rs": "fn main() {}",
352 }
353 }),
354 )
355 .await;
356
357 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
358
359 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
360 language_registry.add(rust_lang());
361
362 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
363
364 cx.executor().run_until_parked();
365
366 cx.update(|cx| {
367 let tree = worktree.read(cx);
368 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
369 let file = File::for_entry(file_entry, worktree.clone());
370 let file_language = project
371 .read(cx)
372 .languages()
373 .load_language_for_file_path(file.path.as_std_path());
374 let file_language = cx
375 .foreground_executor()
376 .block_on(file_language)
377 .expect("Failed to get file language");
378 let file = file as _;
379 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
380
381 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
382 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
383 });
384}
385
/// A `root = true` config in an ancestor directory stops the upward traversal
/// there: its values apply, but configs above it do not.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let editorconfig discovery and settings propagation finish.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
433
/// Two sibling worktrees under the same ancestor `.editorconfig` both inherit
/// its settings, even though each worktree has its own internal config.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
495
496#[gpui::test]
497async fn test_external_editorconfig_not_loaded_without_internal_config(
498 cx: &mut gpui::TestAppContext,
499) {
500 init_test(cx);
501
502 let fs = FakeFs::new(cx.executor());
503 fs.insert_tree(
504 path!("/parent"),
505 json!({
506 ".editorconfig": "[*]\nindent_size = 99\n",
507 "worktree": {
508 "file.rs": "fn main() {}",
509 }
510 }),
511 )
512 .await;
513
514 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
515
516 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
517 language_registry.add(rust_lang());
518
519 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
520
521 cx.executor().run_until_parked();
522
523 cx.update(|cx| {
524 let tree = worktree.read(cx);
525 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
526 let file = File::for_entry(file_entry, worktree.clone());
527 let file_language = project
528 .read(cx)
529 .languages()
530 .load_language_for_file_path(file.path.as_std_path());
531 let file_language = cx
532 .foreground_executor()
533 .block_on(file_language)
534 .expect("Failed to get file language");
535 let file = file as _;
536 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
537
538 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
539 // because without an internal .editorconfig, external configs are not loaded
540 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
541 });
542}
543
/// Editing an external (ancestor) `.editorconfig` on disk must be observed and
/// refresh the settings of files inside the worktree.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                // The empty internal config makes external configs load.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the change should be picked up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
617
/// Adding a worktree to an already-open project must run external-editorconfig
/// discovery for the new worktree too.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories open.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the sibling directory as a second worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
694
/// Removing a worktree must drop its editorconfig state, including external
/// (ancestor) configs and their file watchers.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                // The empty internal config makes the external config load.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
750
/// An external config shared by two worktrees must survive removal of one
/// worktree (refcount-style cleanup) and keep serving the remaining one.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
848
/// Project-level `git_hosting_providers` settings register a provider in the
/// global registry and unregister it again when the setting is removed.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The "foo" provider from project settings should now be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings should unregister the provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
913
/// End-to-end check of per-directory `.zed` settings and tasks: nested
/// `settings.json` overrides the root one per-file, tasks from every
/// `.zed/tasks.json` directory are surfaced, scheduling a task changes the
/// returned ordering, and file-based global tasks are merged in.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Tasks are resolved against the active worktree's context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the root-level `.zed/tasks.json` entry.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Sanity-check that the nested settings file overrides the root one.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and register a global (file-based) task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled ".zed" task now comes first; the global task is appended
    // last with its env carried through.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1114
1115#[gpui::test]
1116async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1117 init_test(cx);
1118 TaskStore::init(None);
1119
1120 let fs = FakeFs::new(cx.executor());
1121 fs.insert_tree(
1122 path!("/dir"),
1123 json!({
1124 ".zed": {
1125 "tasks.json": r#"[{
1126 "label": "test worktree root",
1127 "command": "echo $ZED_WORKTREE_ROOT"
1128 }]"#,
1129 },
1130 "a": {
1131 "a.rs": "fn a() {\n A\n}"
1132 },
1133 }),
1134 )
1135 .await;
1136
1137 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1138 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1139
1140 cx.executor().run_until_parked();
1141 let worktree_id = cx.update(|cx| {
1142 project.update(cx, |project, cx| {
1143 project.worktrees(cx).next().unwrap().read(cx).id()
1144 })
1145 });
1146
1147 let active_non_worktree_item_tasks = cx
1148 .update(|cx| {
1149 get_all_tasks(
1150 &project,
1151 Arc::new(TaskContexts {
1152 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1153 active_worktree_context: None,
1154 other_worktree_contexts: Vec::new(),
1155 lsp_task_sources: HashMap::default(),
1156 latest_selection: None,
1157 }),
1158 cx,
1159 )
1160 })
1161 .await;
1162 assert!(
1163 active_non_worktree_item_tasks.is_empty(),
1164 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1165 );
1166
1167 let active_worktree_tasks = cx
1168 .update(|cx| {
1169 get_all_tasks(
1170 &project,
1171 Arc::new(TaskContexts {
1172 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1173 active_worktree_context: Some((worktree_id, {
1174 let mut worktree_context = TaskContext::default();
1175 worktree_context
1176 .task_variables
1177 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1178 worktree_context
1179 })),
1180 other_worktree_contexts: Vec::new(),
1181 lsp_task_sources: HashMap::default(),
1182 latest_selection: None,
1183 }),
1184 cx,
1185 )
1186 })
1187 .await;
1188 assert_eq!(
1189 active_worktree_tasks
1190 .into_iter()
1191 .map(|(source_kind, task)| {
1192 let resolved = task.resolved;
1193 (source_kind, resolved.command.unwrap())
1194 })
1195 .collect::<Vec<_>>(),
1196 vec![(
1197 TaskSourceKind::Worktree {
1198 id: worktree_id,
1199 directory_in_worktree: rel_path(".zed").into(),
1200 id_base: "local worktree tasks from directory \".zed\"".into(),
1201 },
1202 "echo /dir".to_string(),
1203 )]
1204 );
1205}
1206
// Verifies that a single language server adapter can be instantiated more than
// once inside one worktree: two Python subprojects initially share one server
// rooted per-subproject, and selecting a distinct toolchain for one subproject
// spawns a second server instance for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a subproject root is the nearest ancestor
    // directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up to `depth` ancestors of `path`; return the first one that
        // contains a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own venv and `pyproject.toml`
    // so the manifest provider roots them independently.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance (id 0)
    // because no per-subproject toolchain has been selected yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active for project-b until one is explicitly chosen.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b; this should restart
    // language support for that subproject with a fresh server instance.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1408
// End-to-end lifecycle test for language-server management: server startup on
// buffer open, capability-driven buffer configuration, routing of
// change/save/close notifications to the matching server, re-open on rename
// (including language changes when the extension changes), diagnostics reset
// on language change, document-version reset, and full server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: advertises completion triggers and save notifications,
    // so we can verify buffers are configured from server capabilities.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different completion trigger, to distinguish
    // which server configured which buffer.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // Same-extension rename: the Rust server sees a close for the old path
    // followed by an open for the new one.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1811
// Verifies how user-configured language-server binary paths are resolved:
// a path with directory components resolves relative to the worktree root,
// while a bare file name is left as-is (PATH lookup at launch time).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    // NOTE(review): the settings above reference
                    // `my_fake_lsp_binary.exe`, but this fixture creates
                    // `my_fake_lsp.exe` — confirm whether on-disk existence
                    // actually drives relative-path resolution, or whether the
                    // presence of directory separators is what matters.
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is anchored at the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The bare name is passed through untouched for PATH resolution.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
1895
1896#[gpui::test]
1897async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1898 init_test(cx);
1899
1900 let settings_json_contents = json!({
1901 "languages": {
1902 "Rust": {
1903 "language_servers": ["tilde_lsp"]
1904 }
1905 },
1906 "lsp": {
1907 "tilde_lsp": {
1908 "binary": {
1909 "path": "~/.local/bin/rust-analyzer",
1910 }
1911 }
1912 },
1913 });
1914
1915 let fs = FakeFs::new(cx.executor());
1916 fs.insert_tree(
1917 path!("/root"),
1918 json!({
1919 ".zed": {
1920 "settings.json": settings_json_contents.to_string(),
1921 },
1922 "src": {
1923 "main.rs": "fn main() {}",
1924 }
1925 }),
1926 )
1927 .await;
1928
1929 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1930 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1931 language_registry.add(rust_lang());
1932
1933 let mut tilde_lsp = language_registry.register_fake_lsp(
1934 "Rust",
1935 FakeLspAdapter {
1936 name: "tilde_lsp",
1937 ..Default::default()
1938 },
1939 );
1940 cx.run_until_parked();
1941
1942 project
1943 .update(cx, |project, cx| {
1944 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1945 })
1946 .await
1947 .unwrap();
1948
1949 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1950 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1951 assert_eq!(
1952 lsp_path, expected_path,
1953 "Tilde path should expand to home directory"
1954 );
1955}
1956
1957#[gpui::test]
1958async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1959 init_test(cx);
1960
1961 let fs = FakeFs::new(cx.executor());
1962 fs.insert_tree(
1963 path!("/the-root"),
1964 json!({
1965 ".gitignore": "target\n",
1966 "Cargo.lock": "",
1967 "src": {
1968 "a.rs": "",
1969 "b.rs": "",
1970 },
1971 "target": {
1972 "x": {
1973 "out": {
1974 "x.rs": ""
1975 }
1976 },
1977 "y": {
1978 "out": {
1979 "y.rs": "",
1980 }
1981 },
1982 "z": {
1983 "out": {
1984 "z.rs": ""
1985 }
1986 }
1987 }
1988 }),
1989 )
1990 .await;
1991 fs.insert_tree(
1992 path!("/the-registry"),
1993 json!({
1994 "dep1": {
1995 "src": {
1996 "dep1.rs": "",
1997 }
1998 },
1999 "dep2": {
2000 "src": {
2001 "dep2.rs": "",
2002 }
2003 },
2004 }),
2005 )
2006 .await;
2007 fs.insert_tree(
2008 path!("/the/stdlib"),
2009 json!({
2010 "LICENSE": "",
2011 "src": {
2012 "string.rs": "",
2013 }
2014 }),
2015 )
2016 .await;
2017
2018 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2019 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2020 (project.languages().clone(), project.lsp_store())
2021 });
2022 language_registry.add(rust_lang());
2023 let mut fake_servers = language_registry.register_fake_lsp(
2024 "Rust",
2025 FakeLspAdapter {
2026 name: "the-language-server",
2027 ..Default::default()
2028 },
2029 );
2030
2031 cx.executor().run_until_parked();
2032
2033 // Start the language server by opening a buffer with a compatible file extension.
2034 project
2035 .update(cx, |project, cx| {
2036 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2037 })
2038 .await
2039 .unwrap();
2040
2041 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2042 project.update(cx, |project, cx| {
2043 let worktree = project.worktrees(cx).next().unwrap();
2044 assert_eq!(
2045 worktree
2046 .read(cx)
2047 .snapshot()
2048 .entries(true, 0)
2049 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2050 .collect::<Vec<_>>(),
2051 &[
2052 ("", false),
2053 (".gitignore", false),
2054 ("Cargo.lock", false),
2055 ("src", false),
2056 ("src/a.rs", false),
2057 ("src/b.rs", false),
2058 ("target", true),
2059 ]
2060 );
2061 });
2062
2063 let prev_read_dir_count = fs.read_dir_call_count();
2064
2065 let fake_server = fake_servers.next().await.unwrap();
2066 cx.executor().run_until_parked();
2067 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2068 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2069 id
2070 });
2071
2072 // Simulate jumping to a definition in a dependency outside of the worktree.
2073 let _out_of_worktree_buffer = project
2074 .update(cx, |project, cx| {
2075 project.open_local_buffer_via_lsp(
2076 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2077 server_id,
2078 cx,
2079 )
2080 })
2081 .await
2082 .unwrap();
2083
2084 // Keep track of the FS events reported to the language server.
2085 let file_changes = Arc::new(Mutex::new(Vec::new()));
2086 fake_server
2087 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2088 registrations: vec![lsp::Registration {
2089 id: Default::default(),
2090 method: "workspace/didChangeWatchedFiles".to_string(),
2091 register_options: serde_json::to_value(
2092 lsp::DidChangeWatchedFilesRegistrationOptions {
2093 watchers: vec![
2094 lsp::FileSystemWatcher {
2095 glob_pattern: lsp::GlobPattern::String(
2096 path!("/the-root/Cargo.toml").to_string(),
2097 ),
2098 kind: None,
2099 },
2100 lsp::FileSystemWatcher {
2101 glob_pattern: lsp::GlobPattern::String(
2102 path!("/the-root/src/*.{rs,c}").to_string(),
2103 ),
2104 kind: None,
2105 },
2106 lsp::FileSystemWatcher {
2107 glob_pattern: lsp::GlobPattern::String(
2108 path!("/the-root/target/y/**/*.rs").to_string(),
2109 ),
2110 kind: None,
2111 },
2112 lsp::FileSystemWatcher {
2113 glob_pattern: lsp::GlobPattern::String(
2114 path!("/the/stdlib/src/**/*.rs").to_string(),
2115 ),
2116 kind: None,
2117 },
2118 lsp::FileSystemWatcher {
2119 glob_pattern: lsp::GlobPattern::String(
2120 path!("**/Cargo.lock").to_string(),
2121 ),
2122 kind: None,
2123 },
2124 ],
2125 },
2126 )
2127 .ok(),
2128 }],
2129 })
2130 .await
2131 .into_response()
2132 .unwrap();
2133 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2134 let file_changes = file_changes.clone();
2135 move |params, _| {
2136 let mut file_changes = file_changes.lock();
2137 file_changes.extend(params.changes);
2138 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2139 }
2140 });
2141
2142 cx.executor().run_until_parked();
2143 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2144 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2145
2146 let mut new_watched_paths = fs.watched_paths();
2147 new_watched_paths.retain(|path| {
2148 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2149 });
2150 assert_eq!(
2151 &new_watched_paths,
2152 &[
2153 Path::new(path!("/the-root")),
2154 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2155 Path::new(path!("/the/stdlib/src"))
2156 ]
2157 );
2158
2159 // Now the language server has asked us to watch an ignored directory path,
2160 // so we recursively load it.
2161 project.update(cx, |project, cx| {
2162 let worktree = project.visible_worktrees(cx).next().unwrap();
2163 assert_eq!(
2164 worktree
2165 .read(cx)
2166 .snapshot()
2167 .entries(true, 0)
2168 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2169 .collect::<Vec<_>>(),
2170 &[
2171 ("", false),
2172 (".gitignore", false),
2173 ("Cargo.lock", false),
2174 ("src", false),
2175 ("src/a.rs", false),
2176 ("src/b.rs", false),
2177 ("target", true),
2178 ("target/x", true),
2179 ("target/y", true),
2180 ("target/y/out", true),
2181 ("target/y/out/y.rs", true),
2182 ("target/z", true),
2183 ]
2184 );
2185 });
2186
2187 // Perform some file system mutations, two of which match the watched patterns,
2188 // and one of which does not.
2189 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2190 .await
2191 .unwrap();
2192 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2193 .await
2194 .unwrap();
2195 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2196 .await
2197 .unwrap();
2198 fs.create_file(
2199 path!("/the-root/target/x/out/x2.rs").as_ref(),
2200 Default::default(),
2201 )
2202 .await
2203 .unwrap();
2204 fs.create_file(
2205 path!("/the-root/target/y/out/y2.rs").as_ref(),
2206 Default::default(),
2207 )
2208 .await
2209 .unwrap();
2210 fs.save(
2211 path!("/the-root/Cargo.lock").as_ref(),
2212 &"".into(),
2213 Default::default(),
2214 )
2215 .await
2216 .unwrap();
2217 fs.save(
2218 path!("/the-stdlib/LICENSE").as_ref(),
2219 &"".into(),
2220 Default::default(),
2221 )
2222 .await
2223 .unwrap();
2224 fs.save(
2225 path!("/the/stdlib/src/string.rs").as_ref(),
2226 &"".into(),
2227 Default::default(),
2228 )
2229 .await
2230 .unwrap();
2231
2232 // The language server receives events for the FS mutations that match its watch patterns.
2233 cx.executor().run_until_parked();
2234 assert_eq!(
2235 &*file_changes.lock(),
2236 &[
2237 lsp::FileEvent {
2238 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2239 typ: lsp::FileChangeType::CHANGED,
2240 },
2241 lsp::FileEvent {
2242 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2243 typ: lsp::FileChangeType::DELETED,
2244 },
2245 lsp::FileEvent {
2246 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2247 typ: lsp::FileChangeType::CREATED,
2248 },
2249 lsp::FileEvent {
2250 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2251 typ: lsp::FileChangeType::CREATED,
2252 },
2253 lsp::FileEvent {
2254 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2255 typ: lsp::FileChangeType::CHANGED,
2256 },
2257 ]
2258 );
2259}
2260
// Verifies that when one language server reports diagnostics for two separate
// single-file worktrees, each buffer receives only its own diagnostic, at the
// severity the server reported for that file.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees, one per file.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file from the same server id:
    // an ERROR covering `a` in a.rs and a WARNING covering `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer highlights exactly the identifier covered by its own
    // diagnostic — diagnostics must not leak between the two worktrees.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
2366
2367#[gpui::test]
2368async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2369 init_test(cx);
2370
2371 let fs = FakeFs::new(cx.executor());
2372 fs.insert_tree(
2373 path!("/root"),
2374 json!({
2375 "dir": {
2376 ".git": {
2377 "HEAD": "ref: refs/heads/main",
2378 },
2379 ".gitignore": "b.rs",
2380 "a.rs": "let a = 1;",
2381 "b.rs": "let b = 2;",
2382 },
2383 "other.rs": "let b = c;"
2384 }),
2385 )
2386 .await;
2387
2388 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2389 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2390 let (worktree, _) = project
2391 .update(cx, |project, cx| {
2392 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2393 })
2394 .await
2395 .unwrap();
2396 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2397
2398 let (worktree, _) = project
2399 .update(cx, |project, cx| {
2400 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2401 })
2402 .await
2403 .unwrap();
2404 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2405
2406 let server_id = LanguageServerId(0);
2407 lsp_store.update(cx, |lsp_store, cx| {
2408 lsp_store
2409 .update_diagnostics(
2410 server_id,
2411 lsp::PublishDiagnosticsParams {
2412 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2413 version: None,
2414 diagnostics: vec![lsp::Diagnostic {
2415 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2416 severity: Some(lsp::DiagnosticSeverity::ERROR),
2417 message: "unused variable 'b'".to_string(),
2418 ..Default::default()
2419 }],
2420 },
2421 None,
2422 DiagnosticSourceKind::Pushed,
2423 &[],
2424 cx,
2425 )
2426 .unwrap();
2427 lsp_store
2428 .update_diagnostics(
2429 server_id,
2430 lsp::PublishDiagnosticsParams {
2431 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2432 version: None,
2433 diagnostics: vec![lsp::Diagnostic {
2434 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2435 severity: Some(lsp::DiagnosticSeverity::ERROR),
2436 message: "unknown variable 'c'".to_string(),
2437 ..Default::default()
2438 }],
2439 },
2440 None,
2441 DiagnosticSourceKind::Pushed,
2442 &[],
2443 cx,
2444 )
2445 .unwrap();
2446 });
2447
2448 let main_ignored_buffer = project
2449 .update(cx, |project, cx| {
2450 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2451 })
2452 .await
2453 .unwrap();
2454 main_ignored_buffer.update(cx, |buffer, _| {
2455 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2456 assert_eq!(
2457 chunks
2458 .iter()
2459 .map(|(s, d)| (s.as_str(), *d))
2460 .collect::<Vec<_>>(),
2461 &[
2462 ("let ", None),
2463 ("b", Some(DiagnosticSeverity::ERROR)),
2464 (" = 2;", None),
2465 ],
2466 "Gigitnored buffers should still get in-buffer diagnostics",
2467 );
2468 });
2469 let other_buffer = project
2470 .update(cx, |project, cx| {
2471 project.open_buffer((other_worktree_id, rel_path("")), cx)
2472 })
2473 .await
2474 .unwrap();
2475 other_buffer.update(cx, |buffer, _| {
2476 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2477 assert_eq!(
2478 chunks
2479 .iter()
2480 .map(|(s, d)| (s.as_str(), *d))
2481 .collect::<Vec<_>>(),
2482 &[
2483 ("let b = ", None),
2484 ("c", Some(DiagnosticSeverity::ERROR)),
2485 (";", None),
2486 ],
2487 "Buffers from hidden projects should still get in-buffer diagnostics"
2488 );
2489 });
2490
2491 project.update(cx, |project, cx| {
2492 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2493 assert_eq!(
2494 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2495 vec![(
2496 ProjectPath {
2497 worktree_id: main_worktree_id,
2498 path: rel_path("b.rs").into(),
2499 },
2500 server_id,
2501 DiagnosticSummary {
2502 error_count: 1,
2503 warning_count: 0,
2504 }
2505 )]
2506 );
2507 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2508 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2509 });
2510}
2511
// End-to-end check of the disk-based-diagnostics progress lifecycle: the
// project emits Started/Finished events around the server's progress token,
// emits DiagnosticsUpdated as diagnostics arrive, and suppresses a redundant
// update when empty diagnostics are published twice in a row.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token surfaces as a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while progress is running emits DiagnosticsUpdated
    // for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress surfaces as DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The previously-published diagnostic is attached once the buffer opens.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event is emitted: the second empty publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2647
// Restarting a language server while its disk-based diagnostics task is still
// in flight must not leave the project stuck in a "diagnostics running" state:
// the old server's unfinished progress is dropped along with it.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the new one (id 1) added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the restarted server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is tracked as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2749
2750#[gpui::test]
2751async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2752 init_test(cx);
2753
2754 let fs = FakeFs::new(cx.executor());
2755 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2756
2757 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2758
2759 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2760 language_registry.add(rust_lang());
2761 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2762
2763 let (buffer, _) = project
2764 .update(cx, |project, cx| {
2765 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2766 })
2767 .await
2768 .unwrap();
2769
2770 // Publish diagnostics
2771 let fake_server = fake_servers.next().await.unwrap();
2772 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2773 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2774 version: None,
2775 diagnostics: vec![lsp::Diagnostic {
2776 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2777 severity: Some(lsp::DiagnosticSeverity::ERROR),
2778 message: "the message".to_string(),
2779 ..Default::default()
2780 }],
2781 });
2782
2783 cx.executor().run_until_parked();
2784 buffer.update(cx, |buffer, _| {
2785 assert_eq!(
2786 buffer
2787 .snapshot()
2788 .diagnostics_in_range::<_, usize>(0..1, false)
2789 .map(|entry| entry.diagnostic.message.clone())
2790 .collect::<Vec<_>>(),
2791 ["the message".to_string()]
2792 );
2793 });
2794 project.update(cx, |project, cx| {
2795 assert_eq!(
2796 project.diagnostic_summary(false, cx),
2797 DiagnosticSummary {
2798 error_count: 1,
2799 warning_count: 0,
2800 }
2801 );
2802 });
2803
2804 project.update(cx, |project, cx| {
2805 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2806 });
2807
2808 // The diagnostics are cleared.
2809 cx.executor().run_until_parked();
2810 buffer.update(cx, |buffer, _| {
2811 assert_eq!(
2812 buffer
2813 .snapshot()
2814 .diagnostics_in_range::<_, usize>(0..1, false)
2815 .map(|entry| entry.diagnostic.message.clone())
2816 .collect::<Vec<_>>(),
2817 Vec::<String>::new(),
2818 );
2819 });
2820 project.update(cx, |project, cx| {
2821 assert_eq!(
2822 project.diagnostic_summary(false, cx),
2823 DiagnosticSummary {
2824 error_count: 0,
2825 warning_count: 0,
2826 }
2827 );
2828 });
2829}
2830
2831#[gpui::test]
2832async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2833 init_test(cx);
2834
2835 let fs = FakeFs::new(cx.executor());
2836 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2837
2838 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2839 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2840
2841 language_registry.add(rust_lang());
2842 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2843
2844 let (buffer, _handle) = project
2845 .update(cx, |project, cx| {
2846 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2847 })
2848 .await
2849 .unwrap();
2850
2851 // Before restarting the server, report diagnostics with an unknown buffer version.
2852 let fake_server = fake_servers.next().await.unwrap();
2853 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2854 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2855 version: Some(10000),
2856 diagnostics: Vec::new(),
2857 });
2858 cx.executor().run_until_parked();
2859 project.update(cx, |project, cx| {
2860 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2861 });
2862
2863 let mut fake_server = fake_servers.next().await.unwrap();
2864 let notification = fake_server
2865 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2866 .await
2867 .text_document;
2868 assert_eq!(notification.version, 0);
2869}
2870
2871#[gpui::test]
2872async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let progress_token = "the-progress-token";
2876
2877 let fs = FakeFs::new(cx.executor());
2878 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2879
2880 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2881
2882 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2883 language_registry.add(rust_lang());
2884 let mut fake_servers = language_registry.register_fake_lsp(
2885 "Rust",
2886 FakeLspAdapter {
2887 name: "the-language-server",
2888 disk_based_diagnostics_sources: vec!["disk".into()],
2889 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2890 ..Default::default()
2891 },
2892 );
2893
2894 let (buffer, _handle) = project
2895 .update(cx, |project, cx| {
2896 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2897 })
2898 .await
2899 .unwrap();
2900
2901 // Simulate diagnostics starting to update.
2902 let mut fake_server = fake_servers.next().await.unwrap();
2903 fake_server
2904 .start_progress_with(
2905 "another-token",
2906 lsp::WorkDoneProgressBegin {
2907 cancellable: Some(false),
2908 ..Default::default()
2909 },
2910 )
2911 .await;
2912 // Ensure progress notification is fully processed before starting the next one
2913 cx.executor().run_until_parked();
2914
2915 fake_server
2916 .start_progress_with(
2917 progress_token,
2918 lsp::WorkDoneProgressBegin {
2919 cancellable: Some(true),
2920 ..Default::default()
2921 },
2922 )
2923 .await;
2924 // Ensure progress notification is fully processed before cancelling
2925 cx.executor().run_until_parked();
2926
2927 project.update(cx, |project, cx| {
2928 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2929 });
2930 cx.executor().run_until_parked();
2931
2932 let cancel_notification = fake_server
2933 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2934 .await;
2935 assert_eq!(
2936 cancel_notification.token,
2937 NumberOrString::String(progress_token.into())
2938 );
2939}
2940
// Toggling `enable_language_server` per language must stop and start only the
// matching server: disabling Rust exits rust-lsp while js-lsp keeps running,
// and flipping both settings swaps which server is alive.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance comes up and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3058
3059#[gpui::test(iterations = 3)]
3060async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3061 init_test(cx);
3062
3063 let text = "
3064 fn a() { A }
3065 fn b() { BB }
3066 fn c() { CCC }
3067 "
3068 .unindent();
3069
3070 let fs = FakeFs::new(cx.executor());
3071 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3072
3073 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3074 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3075
3076 language_registry.add(rust_lang());
3077 let mut fake_servers = language_registry.register_fake_lsp(
3078 "Rust",
3079 FakeLspAdapter {
3080 disk_based_diagnostics_sources: vec!["disk".into()],
3081 ..Default::default()
3082 },
3083 );
3084
3085 let buffer = project
3086 .update(cx, |project, cx| {
3087 project.open_local_buffer(path!("/dir/a.rs"), cx)
3088 })
3089 .await
3090 .unwrap();
3091
3092 let _handle = project.update(cx, |project, cx| {
3093 project.register_buffer_with_language_servers(&buffer, cx)
3094 });
3095
3096 let mut fake_server = fake_servers.next().await.unwrap();
3097 let open_notification = fake_server
3098 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3099 .await;
3100
3101 // Edit the buffer, moving the content down
3102 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3103 let change_notification_1 = fake_server
3104 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3105 .await;
3106 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3107
3108 // Report some diagnostics for the initial version of the buffer
3109 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3110 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3111 version: Some(open_notification.text_document.version),
3112 diagnostics: vec![
3113 lsp::Diagnostic {
3114 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3115 severity: Some(DiagnosticSeverity::ERROR),
3116 message: "undefined variable 'A'".to_string(),
3117 source: Some("disk".to_string()),
3118 ..Default::default()
3119 },
3120 lsp::Diagnostic {
3121 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3122 severity: Some(DiagnosticSeverity::ERROR),
3123 message: "undefined variable 'BB'".to_string(),
3124 source: Some("disk".to_string()),
3125 ..Default::default()
3126 },
3127 lsp::Diagnostic {
3128 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3129 severity: Some(DiagnosticSeverity::ERROR),
3130 source: Some("disk".to_string()),
3131 message: "undefined variable 'CCC'".to_string(),
3132 ..Default::default()
3133 },
3134 ],
3135 });
3136
3137 // The diagnostics have moved down since they were created.
3138 cx.executor().run_until_parked();
3139 buffer.update(cx, |buffer, _| {
3140 assert_eq!(
3141 buffer
3142 .snapshot()
3143 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3144 .collect::<Vec<_>>(),
3145 &[
3146 DiagnosticEntry {
3147 range: Point::new(3, 9)..Point::new(3, 11),
3148 diagnostic: Diagnostic {
3149 source: Some("disk".into()),
3150 severity: DiagnosticSeverity::ERROR,
3151 message: "undefined variable 'BB'".to_string(),
3152 is_disk_based: true,
3153 group_id: 1,
3154 is_primary: true,
3155 source_kind: DiagnosticSourceKind::Pushed,
3156 ..Diagnostic::default()
3157 },
3158 },
3159 DiagnosticEntry {
3160 range: Point::new(4, 9)..Point::new(4, 12),
3161 diagnostic: Diagnostic {
3162 source: Some("disk".into()),
3163 severity: DiagnosticSeverity::ERROR,
3164 message: "undefined variable 'CCC'".to_string(),
3165 is_disk_based: true,
3166 group_id: 2,
3167 is_primary: true,
3168 source_kind: DiagnosticSourceKind::Pushed,
3169 ..Diagnostic::default()
3170 }
3171 }
3172 ]
3173 );
3174 assert_eq!(
3175 chunks_with_diagnostics(buffer, 0..buffer.len()),
3176 [
3177 ("\n\nfn a() { ".to_string(), None),
3178 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3179 (" }\nfn b() { ".to_string(), None),
3180 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3181 (" }\nfn c() { ".to_string(), None),
3182 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3183 (" }\n".to_string(), None),
3184 ]
3185 );
3186 assert_eq!(
3187 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3188 [
3189 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3190 (" }\nfn c() { ".to_string(), None),
3191 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3192 ]
3193 );
3194 });
3195
3196 // Ensure overlapping diagnostics are highlighted correctly.
3197 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3198 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3199 version: Some(open_notification.text_document.version),
3200 diagnostics: vec![
3201 lsp::Diagnostic {
3202 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3203 severity: Some(DiagnosticSeverity::ERROR),
3204 message: "undefined variable 'A'".to_string(),
3205 source: Some("disk".to_string()),
3206 ..Default::default()
3207 },
3208 lsp::Diagnostic {
3209 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3210 severity: Some(DiagnosticSeverity::WARNING),
3211 message: "unreachable statement".to_string(),
3212 source: Some("disk".to_string()),
3213 ..Default::default()
3214 },
3215 ],
3216 });
3217
3218 cx.executor().run_until_parked();
3219 buffer.update(cx, |buffer, _| {
3220 assert_eq!(
3221 buffer
3222 .snapshot()
3223 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3224 .collect::<Vec<_>>(),
3225 &[
3226 DiagnosticEntry {
3227 range: Point::new(2, 9)..Point::new(2, 12),
3228 diagnostic: Diagnostic {
3229 source: Some("disk".into()),
3230 severity: DiagnosticSeverity::WARNING,
3231 message: "unreachable statement".to_string(),
3232 is_disk_based: true,
3233 group_id: 4,
3234 is_primary: true,
3235 source_kind: DiagnosticSourceKind::Pushed,
3236 ..Diagnostic::default()
3237 }
3238 },
3239 DiagnosticEntry {
3240 range: Point::new(2, 9)..Point::new(2, 10),
3241 diagnostic: Diagnostic {
3242 source: Some("disk".into()),
3243 severity: DiagnosticSeverity::ERROR,
3244 message: "undefined variable 'A'".to_string(),
3245 is_disk_based: true,
3246 group_id: 3,
3247 is_primary: true,
3248 source_kind: DiagnosticSourceKind::Pushed,
3249 ..Diagnostic::default()
3250 },
3251 }
3252 ]
3253 );
3254 assert_eq!(
3255 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3256 [
3257 ("fn a() { ".to_string(), None),
3258 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3259 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3260 ("\n".to_string(), None),
3261 ]
3262 );
3263 assert_eq!(
3264 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3265 [
3266 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3267 ("\n".to_string(), None),
3268 ]
3269 );
3270 });
3271
3272 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3273 // changes since the last save.
3274 buffer.update(cx, |buffer, cx| {
3275 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3276 buffer.edit(
3277 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3278 None,
3279 cx,
3280 );
3281 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3282 });
3283 let change_notification_2 = fake_server
3284 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3285 .await;
3286 assert!(
3287 change_notification_2.text_document.version > change_notification_1.text_document.version
3288 );
3289
3290 // Handle out-of-order diagnostics
3291 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3292 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3293 version: Some(change_notification_2.text_document.version),
3294 diagnostics: vec![
3295 lsp::Diagnostic {
3296 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3297 severity: Some(DiagnosticSeverity::ERROR),
3298 message: "undefined variable 'BB'".to_string(),
3299 source: Some("disk".to_string()),
3300 ..Default::default()
3301 },
3302 lsp::Diagnostic {
3303 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3304 severity: Some(DiagnosticSeverity::WARNING),
3305 message: "undefined variable 'A'".to_string(),
3306 source: Some("disk".to_string()),
3307 ..Default::default()
3308 },
3309 ],
3310 });
3311
3312 cx.executor().run_until_parked();
3313 buffer.update(cx, |buffer, _| {
3314 assert_eq!(
3315 buffer
3316 .snapshot()
3317 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3318 .collect::<Vec<_>>(),
3319 &[
3320 DiagnosticEntry {
3321 range: Point::new(2, 21)..Point::new(2, 22),
3322 diagnostic: Diagnostic {
3323 source: Some("disk".into()),
3324 severity: DiagnosticSeverity::WARNING,
3325 message: "undefined variable 'A'".to_string(),
3326 is_disk_based: true,
3327 group_id: 6,
3328 is_primary: true,
3329 source_kind: DiagnosticSourceKind::Pushed,
3330 ..Diagnostic::default()
3331 }
3332 },
3333 DiagnosticEntry {
3334 range: Point::new(3, 9)..Point::new(3, 14),
3335 diagnostic: Diagnostic {
3336 source: Some("disk".into()),
3337 severity: DiagnosticSeverity::ERROR,
3338 message: "undefined variable 'BB'".to_string(),
3339 is_disk_based: true,
3340 group_id: 5,
3341 is_primary: true,
3342 source_kind: DiagnosticSourceKind::Pushed,
3343 ..Diagnostic::default()
3344 },
3345 }
3346 ]
3347 );
3348 });
3349}
3350
// Verifies how zero-width (empty-range) LSP diagnostics are rendered: the
// highlight must be widened to cover an adjacent character so the diagnostic
// is actually visible in the buffer.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two diagnostics whose ranges are empty (start == end): one in the
    // middle of line 0, and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3428
// Verifies that diagnostics reported by two different language servers for the
// same file are tracked independently and both counted in the project-wide
// diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path, same range — but reported by server 0.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and an overlapping diagnostic reported by server 1. It must not
        // replace server 0's entry.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors should be reflected in the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3489
// Verifies that LSP text edits computed against an OLDER buffer version are
// correctly translated through the edits the user made since that version,
// so they apply cleanly to the current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert LSP edits (expressed in coordinates of the stale version) into
    // buffer edits. The ranges should be remapped past the user's edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // changes while also applying the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3644
// Verifies that a large, sloppy LSP diff (re-inserting most of the file) is
// minimized by `edits_from_lsp` into the smallest equivalent set of buffer
// edits — the pattern rust-analyzer produces for merge-imports code actions.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four sloppy LSP edits should collapse to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3755
// Verifies that an insertion edit that comes AFTER a replacement at the same
// position (which violates the LSP spec's ordering rules) is still applied
// sensibly rather than being dropped or misordered.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import insertion must land before the (no-op) replacement's range.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3811
// Verifies that `edits_from_lsp` tolerates malformed server edits: unsorted
// edits, inverted ranges (end before start), and ranges that point past the
// end of the buffer — clipping and normalizing them into valid minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed merge-imports test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3918
3919fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3920 buffer: &Buffer,
3921 range: Range<T>,
3922) -> Vec<(String, Option<DiagnosticSeverity>)> {
3923 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3924 for chunk in buffer.snapshot().chunks(range, true) {
3925 if chunks
3926 .last()
3927 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3928 {
3929 chunks.last_mut().unwrap().0.push_str(chunk.text);
3930 } else {
3931 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3932 }
3933 }
3934 chunks
3935}
3936
// Verifies go-to-definition across files: the target file is opened without
// spawning a new language server, appears as an invisible worktree while the
// definition is held, and its worktree is released once the definition drops.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside the visible worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a range inside a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: snapshot each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4037
// Verifies completion resolution when the server supplies an explicit
// `text_edit`: its new text and range take precedence over both
// `insert_text` and the item label.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The text_edit replaces the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The completion must use the text_edit's text and range, not the label
    // or insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4121
// Verifies completion fallback behavior when the server omits a per-item
// `text_edit` but supplies a default `edit_range` in the completion list's
// item defaults (LSP 3.17 `itemDefaults`).
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn" (last 3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used as the new text, paired with the default range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4259
// Verifies completion range inference when the server provides neither a
// `text_edit` nor default edit ranges: the replace range must be derived
// from the word fragment around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used; the range is inferred as the "fqn" word fragment
    // preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the text; the inferred range covers the "cmp"
    // fragment before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4366
// Verifies that carriage returns in a server-provided completion insert_text
// ("\r" and "\r\n") are normalized to "\n" before insertion into the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixes a bare "\r" and a Windows "\r\n" in the insert text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both line-ending forms are normalized to "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4435
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Covers the code-action path where the resolved action carries an LSP
    // command rather than edits: applying the action executes the command on
    // the server, which then delivers the real edits back to the client via a
    // `workspace/applyEdit` request. The resulting edits must land in the
    // project transaction returned by `apply_code_action`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    // `set_request_handler` yields a stream of handled requests; awaiting one
    // item ensures the request started above has been answered.
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve data).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The edit was recorded as a single undoable transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4578
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming an entry to a path whose parent directories don't exist yet
    // should create the whole directory hierarchy, preserve the file's
    // contents, and leave the entry renameable again afterwards.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a directory chain that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the entry's new id after the first move.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file upward, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4686
4687#[gpui::test(iterations = 10)]
4688async fn test_save_file(cx: &mut gpui::TestAppContext) {
4689 init_test(cx);
4690
4691 let fs = FakeFs::new(cx.executor());
4692 fs.insert_tree(
4693 path!("/dir"),
4694 json!({
4695 "file1": "the old contents",
4696 }),
4697 )
4698 .await;
4699
4700 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4701 let buffer = project
4702 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4703 .await
4704 .unwrap();
4705 buffer.update(cx, |buffer, cx| {
4706 assert_eq!(buffer.text(), "the old contents");
4707 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4708 });
4709
4710 project
4711 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4712 .await
4713 .unwrap();
4714
4715 let new_text = fs
4716 .load(Path::new(path!("/dir/file1")))
4717 .await
4718 .unwrap()
4719 .replace("\r\n", "\n");
4720 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4721}
4722
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a name that matches a language should
    // start that language's server and register the buffer with it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no file name, hence no detected language, hence
    // no associated language servers yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` name, which should trigger Rust language
    // detection and spawn the fake Rust server.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4802
4803#[gpui::test(iterations = 30)]
4804async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4805 init_test(cx);
4806
4807 let fs = FakeFs::new(cx.executor());
4808 fs.insert_tree(
4809 path!("/dir"),
4810 json!({
4811 "file1": "the original contents",
4812 }),
4813 )
4814 .await;
4815
4816 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4817 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4818 let buffer = project
4819 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4820 .await
4821 .unwrap();
4822
4823 // Change the buffer's file on disk, and then wait for the file change
4824 // to be detected by the worktree, so that the buffer starts reloading.
4825 fs.save(
4826 path!("/dir/file1").as_ref(),
4827 &"the first contents".into(),
4828 Default::default(),
4829 )
4830 .await
4831 .unwrap();
4832 worktree.next_event(cx).await;
4833
4834 // Change the buffer's file again. Depending on the random seed, the
4835 // previous file change may still be in progress.
4836 fs.save(
4837 path!("/dir/file1").as_ref(),
4838 &"the second contents".into(),
4839 Default::default(),
4840 )
4841 .await
4842 .unwrap();
4843 worktree.next_event(cx).await;
4844
4845 cx.executor().run_until_parked();
4846 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4847 buffer.read_with(cx, |buffer, _| {
4848 assert_eq!(buffer.text(), on_disk_text);
4849 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4850 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4851 });
4852}
4853
4854#[gpui::test(iterations = 30)]
4855async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4856 init_test(cx);
4857
4858 let fs = FakeFs::new(cx.executor());
4859 fs.insert_tree(
4860 path!("/dir"),
4861 json!({
4862 "file1": "the original contents",
4863 }),
4864 )
4865 .await;
4866
4867 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4868 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4869 let buffer = project
4870 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4871 .await
4872 .unwrap();
4873
4874 // Change the buffer's file on disk, and then wait for the file change
4875 // to be detected by the worktree, so that the buffer starts reloading.
4876 fs.save(
4877 path!("/dir/file1").as_ref(),
4878 &"the first contents".into(),
4879 Default::default(),
4880 )
4881 .await
4882 .unwrap();
4883 worktree.next_event(cx).await;
4884
4885 cx.executor()
4886 .spawn(cx.executor().simulate_random_delay())
4887 .await;
4888
4889 // Perform a noop edit, causing the buffer's version to increase.
4890 buffer.update(cx, |buffer, cx| {
4891 buffer.edit([(0..0, " ")], None, cx);
4892 buffer.undo(cx);
4893 });
4894
4895 cx.executor().run_until_parked();
4896 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4897 buffer.read_with(cx, |buffer, _| {
4898 let buffer_text = buffer.text();
4899 if buffer_text == on_disk_text {
4900 assert!(
4901 !buffer.is_dirty() && !buffer.has_conflict(),
4902 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4903 );
4904 }
4905 // If the file change occurred while the buffer was processing the first
4906 // change, the buffer will be in a conflicting state.
4907 else {
4908 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4909 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4910 }
4911 });
4912}
4913
4914#[gpui::test]
4915async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4916 init_test(cx);
4917
4918 let fs = FakeFs::new(cx.executor());
4919 fs.insert_tree(
4920 path!("/dir"),
4921 json!({
4922 "file1": "the old contents",
4923 }),
4924 )
4925 .await;
4926
4927 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4928 let buffer = project
4929 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4930 .await
4931 .unwrap();
4932 buffer.update(cx, |buffer, cx| {
4933 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4934 });
4935
4936 project
4937 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4938 .await
4939 .unwrap();
4940
4941 let new_text = fs
4942 .load(Path::new(path!("/dir/file1")))
4943 .await
4944 .unwrap()
4945 .replace("\r\n", "\n");
4946 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4947}
4948
4949#[gpui::test]
4950async fn test_save_as(cx: &mut gpui::TestAppContext) {
4951 init_test(cx);
4952
4953 let fs = FakeFs::new(cx.executor());
4954 fs.insert_tree("/dir", json!({})).await;
4955
4956 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4957
4958 let languages = project.update(cx, |project, _| project.languages().clone());
4959 languages.add(rust_lang());
4960
4961 let buffer = project.update(cx, |project, cx| {
4962 project.create_local_buffer("", None, false, cx)
4963 });
4964 buffer.update(cx, |buffer, cx| {
4965 buffer.edit([(0..0, "abc")], None, cx);
4966 assert!(buffer.is_dirty());
4967 assert!(!buffer.has_conflict());
4968 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4969 });
4970 project
4971 .update(cx, |project, cx| {
4972 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4973 let path = ProjectPath {
4974 worktree_id,
4975 path: rel_path("file1.rs").into(),
4976 };
4977 project.save_buffer_as(buffer.clone(), path, cx)
4978 })
4979 .await
4980 .unwrap();
4981 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4982
4983 cx.executor().run_until_parked();
4984 buffer.update(cx, |buffer, cx| {
4985 assert_eq!(
4986 buffer.file().unwrap().full_path(cx),
4987 Path::new("dir/file1.rs")
4988 );
4989 assert!(!buffer.is_dirty());
4990 assert!(!buffer.has_conflict());
4991 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4992 });
4993
4994 let opened_buffer = project
4995 .update(cx, |project, cx| {
4996 project.open_local_buffer("/dir/file1.rs", cx)
4997 })
4998 .await
4999 .unwrap();
5000 assert_eq!(opened_buffer, buffer);
5001}
5002
5003#[gpui::test]
5004async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5005 init_test(cx);
5006
5007 let fs = FakeFs::new(cx.executor());
5008 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5009
5010 fs.insert_tree(
5011 path!("/dir"),
5012 json!({
5013 "data_a.txt": "data about a"
5014 }),
5015 )
5016 .await;
5017
5018 let buffer = project
5019 .update(cx, |project, cx| {
5020 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5021 })
5022 .await
5023 .unwrap();
5024
5025 buffer.update(cx, |buffer, cx| {
5026 buffer.edit([(11..12, "b")], None, cx);
5027 });
5028
5029 // Save buffer's contents as a new file and confirm that the buffer's now
5030 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5031 // file associated with the buffer has now been updated to `data_b.txt`
5032 project
5033 .update(cx, |project, cx| {
5034 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5035 let new_path = ProjectPath {
5036 worktree_id,
5037 path: rel_path("data_b.txt").into(),
5038 };
5039
5040 project.save_buffer_as(buffer.clone(), new_path, cx)
5041 })
5042 .await
5043 .unwrap();
5044
5045 buffer.update(cx, |buffer, cx| {
5046 assert_eq!(
5047 buffer.file().unwrap().full_path(cx),
5048 Path::new("dir/data_b.txt")
5049 )
5050 });
5051
5052 // Open the original `data_a.txt` file, confirming that its contents are
5053 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5054 let original_buffer = project
5055 .update(cx, |project, cx| {
5056 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5057 })
5058 .await
5059 .unwrap();
5060
5061 original_buffer.update(cx, |buffer, cx| {
5062 assert_eq!(buffer.text(), "data about a");
5063 assert_eq!(
5064 buffer.file().unwrap().full_path(cx),
5065 Path::new("dir/data_a.txt")
5066 )
5067 });
5068}
5069
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // End-to-end check on a real filesystem: after renames and deletions on
    // disk, the worktree rescan must keep entry ids stable, move open
    // buffers' paths along with their files, and streaming the resulting
    // serialized updates to a remote replica must reproduce the same tree.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Open a buffer for the given worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolve a worktree-relative path to its stable entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Collect every serialized update so it can be replayed on the remote
    // replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including a rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files; the deleted file's buffer keeps its
    // last-known path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5237
5238#[gpui::test(iterations = 10)]
5239async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5240 init_test(cx);
5241
5242 let fs = FakeFs::new(cx.executor());
5243 fs.insert_tree(
5244 path!("/dir"),
5245 json!({
5246 "a": {
5247 "file1": "",
5248 }
5249 }),
5250 )
5251 .await;
5252
5253 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5254 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5255 let tree_id = tree.update(cx, |tree, _| tree.id());
5256
5257 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5258 project.update(cx, |project, cx| {
5259 let tree = project.worktrees(cx).next().unwrap();
5260 tree.read(cx)
5261 .entry_for_path(rel_path(path))
5262 .unwrap_or_else(|| panic!("no entry for path {}", path))
5263 .id
5264 })
5265 };
5266
5267 let dir_id = id_for_path("a", cx);
5268 let file_id = id_for_path("a/file1", cx);
5269 let buffer = project
5270 .update(cx, |p, cx| {
5271 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5272 })
5273 .await
5274 .unwrap();
5275 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5276
5277 project
5278 .update(cx, |project, cx| {
5279 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5280 })
5281 .unwrap()
5282 .await
5283 .into_included()
5284 .unwrap();
5285 cx.executor().run_until_parked();
5286
5287 assert_eq!(id_for_path("b", cx), dir_id);
5288 assert_eq!(id_for_path("b/file1", cx), file_id);
5289 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5290}
5291
5292#[gpui::test]
5293async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5294 init_test(cx);
5295
5296 let fs = FakeFs::new(cx.executor());
5297 fs.insert_tree(
5298 "/dir",
5299 json!({
5300 "a.txt": "a-contents",
5301 "b.txt": "b-contents",
5302 }),
5303 )
5304 .await;
5305
5306 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5307
5308 // Spawn multiple tasks to open paths, repeating some paths.
5309 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5310 (
5311 p.open_local_buffer("/dir/a.txt", cx),
5312 p.open_local_buffer("/dir/b.txt", cx),
5313 p.open_local_buffer("/dir/a.txt", cx),
5314 )
5315 });
5316
5317 let buffer_a_1 = buffer_a_1.await.unwrap();
5318 let buffer_a_2 = buffer_a_2.await.unwrap();
5319 let buffer_b = buffer_b.await.unwrap();
5320 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5321 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5322
5323 // There is only one buffer per path.
5324 let buffer_a_id = buffer_a_1.entity_id();
5325 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5326
5327 // Open the same path again while it is still open.
5328 drop(buffer_a_1);
5329 let buffer_a_3 = project
5330 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5331 .await
5332 .unwrap();
5333
5334 // There's still only one buffer per path.
5335 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5336}
5337
5338#[gpui::test]
5339async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5340 init_test(cx);
5341
5342 let fs = FakeFs::new(cx.executor());
5343 fs.insert_tree(
5344 path!("/dir"),
5345 json!({
5346 "file1": "abc",
5347 "file2": "def",
5348 "file3": "ghi",
5349 }),
5350 )
5351 .await;
5352
5353 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5354
5355 let buffer1 = project
5356 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5357 .await
5358 .unwrap();
5359 let events = Arc::new(Mutex::new(Vec::new()));
5360
5361 // initially, the buffer isn't dirty.
5362 buffer1.update(cx, |buffer, cx| {
5363 cx.subscribe(&buffer1, {
5364 let events = events.clone();
5365 move |_, _, event, _| match event {
5366 BufferEvent::Operation { .. } => {}
5367 _ => events.lock().push(event.clone()),
5368 }
5369 })
5370 .detach();
5371
5372 assert!(!buffer.is_dirty());
5373 assert!(events.lock().is_empty());
5374
5375 buffer.edit([(1..2, "")], None, cx);
5376 });
5377
5378 // after the first edit, the buffer is dirty, and emits a dirtied event.
5379 buffer1.update(cx, |buffer, cx| {
5380 assert!(buffer.text() == "ac");
5381 assert!(buffer.is_dirty());
5382 assert_eq!(
5383 *events.lock(),
5384 &[
5385 language::BufferEvent::Edited,
5386 language::BufferEvent::DirtyChanged
5387 ]
5388 );
5389 events.lock().clear();
5390 buffer.did_save(
5391 buffer.version(),
5392 buffer.file().unwrap().disk_state().mtime(),
5393 cx,
5394 );
5395 });
5396
5397 // after saving, the buffer is not dirty, and emits a saved event.
5398 buffer1.update(cx, |buffer, cx| {
5399 assert!(!buffer.is_dirty());
5400 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5401 events.lock().clear();
5402
5403 buffer.edit([(1..1, "B")], None, cx);
5404 buffer.edit([(2..2, "D")], None, cx);
5405 });
5406
5407 // after editing again, the buffer is dirty, and emits another dirty event.
5408 buffer1.update(cx, |buffer, cx| {
5409 assert!(buffer.text() == "aBDc");
5410 assert!(buffer.is_dirty());
5411 assert_eq!(
5412 *events.lock(),
5413 &[
5414 language::BufferEvent::Edited,
5415 language::BufferEvent::DirtyChanged,
5416 language::BufferEvent::Edited,
5417 ],
5418 );
5419 events.lock().clear();
5420
5421 // After restoring the buffer to its previously-saved state,
5422 // the buffer is not considered dirty anymore.
5423 buffer.edit([(1..3, "")], None, cx);
5424 assert!(buffer.text() == "ac");
5425 assert!(!buffer.is_dirty());
5426 });
5427
5428 assert_eq!(
5429 *events.lock(),
5430 &[
5431 language::BufferEvent::Edited,
5432 language::BufferEvent::DirtyChanged
5433 ]
5434 );
5435
5436 // When a file is deleted, it is not considered dirty.
5437 let events = Arc::new(Mutex::new(Vec::new()));
5438 let buffer2 = project
5439 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5440 .await
5441 .unwrap();
5442 buffer2.update(cx, |_, cx| {
5443 cx.subscribe(&buffer2, {
5444 let events = events.clone();
5445 move |_, _, event, _| match event {
5446 BufferEvent::Operation { .. } => {}
5447 _ => events.lock().push(event.clone()),
5448 }
5449 })
5450 .detach();
5451 });
5452
5453 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5454 .await
5455 .unwrap();
5456 cx.executor().run_until_parked();
5457 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5458 assert_eq!(
5459 mem::take(&mut *events.lock()),
5460 &[language::BufferEvent::FileHandleChanged]
5461 );
5462
5463 // Buffer becomes dirty when edited.
5464 buffer2.update(cx, |buffer, cx| {
5465 buffer.edit([(2..3, "")], None, cx);
5466 assert_eq!(buffer.is_dirty(), true);
5467 });
5468 assert_eq!(
5469 mem::take(&mut *events.lock()),
5470 &[
5471 language::BufferEvent::Edited,
5472 language::BufferEvent::DirtyChanged
5473 ]
5474 );
5475
5476 // Buffer becomes clean again when all of its content is removed, because
5477 // the file was deleted.
5478 buffer2.update(cx, |buffer, cx| {
5479 buffer.edit([(0..2, "")], None, cx);
5480 assert_eq!(buffer.is_empty(), true);
5481 assert_eq!(buffer.is_dirty(), false);
5482 });
5483 assert_eq!(
5484 *events.lock(),
5485 &[
5486 language::BufferEvent::Edited,
5487 language::BufferEvent::DirtyChanged
5488 ]
5489 );
5490
5491 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5492 let events = Arc::new(Mutex::new(Vec::new()));
5493 let buffer3 = project
5494 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5495 .await
5496 .unwrap();
5497 buffer3.update(cx, |_, cx| {
5498 cx.subscribe(&buffer3, {
5499 let events = events.clone();
5500 move |_, _, event, _| match event {
5501 BufferEvent::Operation { .. } => {}
5502 _ => events.lock().push(event.clone()),
5503 }
5504 })
5505 .detach();
5506 });
5507
5508 buffer3.update(cx, |buffer, cx| {
5509 buffer.edit([(0..0, "x")], None, cx);
5510 });
5511 events.lock().clear();
5512 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5513 .await
5514 .unwrap();
5515 cx.executor().run_until_parked();
5516 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5517 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5518}
5519
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers how an open buffer responds to its file changing on disk:
    // - a clean buffer reloads, applying the on-disk change as a diff so that
    //   anchors keep pointing at the corresponding text;
    // - a dirty buffer keeps its in-memory contents and reports a conflict.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at the ˇ-marked offsets so we can verify they survive the
    // diff-based reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors now resolve to the marked offsets in the *new* text,
        // not to their original numeric offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5602
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Buffers normalize text to "\n" in memory while remembering the on-disk
    // line-ending style; that style is tracked across reloads and reapplied
    // when the buffer is saved.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized to "\n", but the Windows line ending
    // is remembered.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
5664
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics are clustered into groups via their
    // `related_information` links: each primary diagnostic plus the HINT
    // diagnostics that reference it share a `group_id`, with `is_primary` set
    // only on the primary entry.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Fixture: two diagnostic clusters in one publish.
    // - "error 1" (WARNING) with one related hint, published both as the
    //   primary and as a standalone HINT pointing back at it.
    // - "error 2" (ERROR) with two related hints, likewise mirrored as
    //   standalone HINT diagnostics whose related info points back at the
    //   primary ("original diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. The "error 2" cluster gets
    // group_id 0 and the "error 1" cluster gets group_id 1; only the WARNING
    // and ERROR entries are primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5924
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a project entry sends the LSP
    // `workspace/willRenameFiles` request (whose WorkspaceEdit response is
    // captured here) and the `workspace/didRenameFiles` notification, for a
    // server that registered matching file-operation filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the server registers for: all `.rs` files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the willRenameFiles handler is attached afterwards
    // and the rename task is only awaited once the server has responded.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The WorkspaceEdit the fake server returns from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6061
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: prepare_rename resolves the editable
    // range at the cursor, then perform_rename applies the multi-file
    // WorkspaceEdit returned by the server to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // the symbol's range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server answers with edits spanning both files
    // (the definition in one.rs and the two references in two.rs).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its applied edits; both
    // buffers must now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6202
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results come from files on disk, and from the
    // in-memory contents of open buffers with unsaved edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now references
    // `two::TWO` twice; the next search must pick up the buffer contents.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6279
6280#[gpui::test]
6281async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6282 init_test(cx);
6283
6284 let search_query = "file";
6285
6286 let fs = FakeFs::new(cx.executor());
6287 fs.insert_tree(
6288 path!("/dir"),
6289 json!({
6290 "one.rs": r#"// Rust file one"#,
6291 "one.ts": r#"// TypeScript file one"#,
6292 "two.rs": r#"// Rust file two"#,
6293 "two.ts": r#"// TypeScript file two"#,
6294 }),
6295 )
6296 .await;
6297 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6298
6299 assert!(
6300 search(
6301 &project,
6302 SearchQuery::text(
6303 search_query,
6304 false,
6305 true,
6306 false,
6307 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6308 Default::default(),
6309 false,
6310 None
6311 )
6312 .unwrap(),
6313 cx
6314 )
6315 .await
6316 .unwrap()
6317 .is_empty(),
6318 "If no inclusions match, no files should be returned"
6319 );
6320
6321 assert_eq!(
6322 search(
6323 &project,
6324 SearchQuery::text(
6325 search_query,
6326 false,
6327 true,
6328 false,
6329 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6330 Default::default(),
6331 false,
6332 None
6333 )
6334 .unwrap(),
6335 cx
6336 )
6337 .await
6338 .unwrap(),
6339 HashMap::from_iter([
6340 (path!("dir/one.rs").to_string(), vec![8..12]),
6341 (path!("dir/two.rs").to_string(), vec![8..12]),
6342 ]),
6343 "Rust only search should give only Rust files"
6344 );
6345
6346 assert_eq!(
6347 search(
6348 &project,
6349 SearchQuery::text(
6350 search_query,
6351 false,
6352 true,
6353 false,
6354 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6355 .unwrap(),
6356 Default::default(),
6357 false,
6358 None,
6359 )
6360 .unwrap(),
6361 cx
6362 )
6363 .await
6364 .unwrap(),
6365 HashMap::from_iter([
6366 (path!("dir/one.ts").to_string(), vec![14..18]),
6367 (path!("dir/two.ts").to_string(), vec![14..18]),
6368 ]),
6369 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6370 );
6371
6372 assert_eq!(
6373 search(
6374 &project,
6375 SearchQuery::text(
6376 search_query,
6377 false,
6378 true,
6379 false,
6380 PathMatcher::new(
6381 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6382 PathStyle::local()
6383 )
6384 .unwrap(),
6385 Default::default(),
6386 false,
6387 None,
6388 )
6389 .unwrap(),
6390 cx
6391 )
6392 .await
6393 .unwrap(),
6394 HashMap::from_iter([
6395 (path!("dir/two.ts").to_string(), vec![14..18]),
6396 (path!("dir/one.rs").to_string(), vec![8..12]),
6397 (path!("dir/one.ts").to_string(), vec![14..18]),
6398 (path!("dir/two.rs").to_string(), vec![8..12]),
6399 ]),
6400 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6401 );
6402}
6403
6404#[gpui::test]
6405async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6406 init_test(cx);
6407
6408 let search_query = "file";
6409
6410 let fs = FakeFs::new(cx.executor());
6411 fs.insert_tree(
6412 path!("/dir"),
6413 json!({
6414 "one.rs": r#"// Rust file one"#,
6415 "one.ts": r#"// TypeScript file one"#,
6416 "two.rs": r#"// Rust file two"#,
6417 "two.ts": r#"// TypeScript file two"#,
6418 }),
6419 )
6420 .await;
6421 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6422
6423 assert_eq!(
6424 search(
6425 &project,
6426 SearchQuery::text(
6427 search_query,
6428 false,
6429 true,
6430 false,
6431 Default::default(),
6432 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6433 false,
6434 None,
6435 )
6436 .unwrap(),
6437 cx
6438 )
6439 .await
6440 .unwrap(),
6441 HashMap::from_iter([
6442 (path!("dir/one.rs").to_string(), vec![8..12]),
6443 (path!("dir/one.ts").to_string(), vec![14..18]),
6444 (path!("dir/two.rs").to_string(), vec![8..12]),
6445 (path!("dir/two.ts").to_string(), vec![14..18]),
6446 ]),
6447 "If no exclusions match, all files should be returned"
6448 );
6449
6450 assert_eq!(
6451 search(
6452 &project,
6453 SearchQuery::text(
6454 search_query,
6455 false,
6456 true,
6457 false,
6458 Default::default(),
6459 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6460 false,
6461 None,
6462 )
6463 .unwrap(),
6464 cx
6465 )
6466 .await
6467 .unwrap(),
6468 HashMap::from_iter([
6469 (path!("dir/one.ts").to_string(), vec![14..18]),
6470 (path!("dir/two.ts").to_string(), vec![14..18]),
6471 ]),
6472 "Rust exclusion search should give only TypeScript files"
6473 );
6474
6475 assert_eq!(
6476 search(
6477 &project,
6478 SearchQuery::text(
6479 search_query,
6480 false,
6481 true,
6482 false,
6483 Default::default(),
6484 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6485 .unwrap(),
6486 false,
6487 None,
6488 )
6489 .unwrap(),
6490 cx
6491 )
6492 .await
6493 .unwrap(),
6494 HashMap::from_iter([
6495 (path!("dir/one.rs").to_string(), vec![8..12]),
6496 (path!("dir/two.rs").to_string(), vec![8..12]),
6497 ]),
6498 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6499 );
6500
6501 assert!(
6502 search(
6503 &project,
6504 SearchQuery::text(
6505 search_query,
6506 false,
6507 true,
6508 false,
6509 Default::default(),
6510 PathMatcher::new(
6511 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6512 PathStyle::local(),
6513 )
6514 .unwrap(),
6515 false,
6516 None,
6517 )
6518 .unwrap(),
6519 cx
6520 )
6521 .await
6522 .unwrap()
6523 .is_empty(),
6524 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6525 );
6526}
6527
6528#[gpui::test]
6529async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6530 init_test(cx);
6531
6532 let search_query = "file";
6533
6534 let fs = FakeFs::new(cx.executor());
6535 fs.insert_tree(
6536 path!("/dir"),
6537 json!({
6538 "one.rs": r#"// Rust file one"#,
6539 "one.ts": r#"// TypeScript file one"#,
6540 "two.rs": r#"// Rust file two"#,
6541 "two.ts": r#"// TypeScript file two"#,
6542 }),
6543 )
6544 .await;
6545
6546 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6547 let path_style = PathStyle::local();
6548 let _buffer = project.update(cx, |project, cx| {
6549 project.create_local_buffer("file", None, false, cx)
6550 });
6551
6552 assert_eq!(
6553 search(
6554 &project,
6555 SearchQuery::text(
6556 search_query,
6557 false,
6558 true,
6559 false,
6560 Default::default(),
6561 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6562 false,
6563 None,
6564 )
6565 .unwrap(),
6566 cx
6567 )
6568 .await
6569 .unwrap(),
6570 HashMap::from_iter([
6571 (path!("dir/one.rs").to_string(), vec![8..12]),
6572 (path!("dir/one.ts").to_string(), vec![14..18]),
6573 (path!("dir/two.rs").to_string(), vec![8..12]),
6574 (path!("dir/two.ts").to_string(), vec![14..18]),
6575 ]),
6576 "If no exclusions match, all files should be returned"
6577 );
6578
6579 assert_eq!(
6580 search(
6581 &project,
6582 SearchQuery::text(
6583 search_query,
6584 false,
6585 true,
6586 false,
6587 Default::default(),
6588 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6589 false,
6590 None,
6591 )
6592 .unwrap(),
6593 cx
6594 )
6595 .await
6596 .unwrap(),
6597 HashMap::from_iter([
6598 (path!("dir/one.ts").to_string(), vec![14..18]),
6599 (path!("dir/two.ts").to_string(), vec![14..18]),
6600 ]),
6601 "Rust exclusion search should give only TypeScript files"
6602 );
6603
6604 assert_eq!(
6605 search(
6606 &project,
6607 SearchQuery::text(
6608 search_query,
6609 false,
6610 true,
6611 false,
6612 Default::default(),
6613 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6614 false,
6615 None,
6616 )
6617 .unwrap(),
6618 cx
6619 )
6620 .await
6621 .unwrap(),
6622 HashMap::from_iter([
6623 (path!("dir/one.rs").to_string(), vec![8..12]),
6624 (path!("dir/two.rs").to_string(), vec![8..12]),
6625 ]),
6626 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6627 );
6628
6629 assert!(
6630 search(
6631 &project,
6632 SearchQuery::text(
6633 search_query,
6634 false,
6635 true,
6636 false,
6637 Default::default(),
6638 PathMatcher::new(
6639 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6640 PathStyle::local(),
6641 )
6642 .unwrap(),
6643 false,
6644 None,
6645 )
6646 .unwrap(),
6647 cx
6648 )
6649 .await
6650 .unwrap()
6651 .is_empty(),
6652 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6653 );
6654}
6655
6656#[gpui::test]
6657async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6658 init_test(cx);
6659
6660 let search_query = "file";
6661
6662 let fs = FakeFs::new(cx.executor());
6663 fs.insert_tree(
6664 path!("/dir"),
6665 json!({
6666 "one.rs": r#"// Rust file one"#,
6667 "one.ts": r#"// TypeScript file one"#,
6668 "two.rs": r#"// Rust file two"#,
6669 "two.ts": r#"// TypeScript file two"#,
6670 }),
6671 )
6672 .await;
6673 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6674 assert!(
6675 search(
6676 &project,
6677 SearchQuery::text(
6678 search_query,
6679 false,
6680 true,
6681 false,
6682 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6683 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6684 false,
6685 None,
6686 )
6687 .unwrap(),
6688 cx
6689 )
6690 .await
6691 .unwrap()
6692 .is_empty(),
6693 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6694 );
6695
6696 assert!(
6697 search(
6698 &project,
6699 SearchQuery::text(
6700 search_query,
6701 false,
6702 true,
6703 false,
6704 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6705 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6706 false,
6707 None,
6708 )
6709 .unwrap(),
6710 cx
6711 )
6712 .await
6713 .unwrap()
6714 .is_empty(),
6715 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6716 );
6717
6718 assert!(
6719 search(
6720 &project,
6721 SearchQuery::text(
6722 search_query,
6723 false,
6724 true,
6725 false,
6726 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6727 .unwrap(),
6728 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6729 .unwrap(),
6730 false,
6731 None,
6732 )
6733 .unwrap(),
6734 cx
6735 )
6736 .await
6737 .unwrap()
6738 .is_empty(),
6739 "Non-matching inclusions and exclusions should not change that."
6740 );
6741
6742 assert_eq!(
6743 search(
6744 &project,
6745 SearchQuery::text(
6746 search_query,
6747 false,
6748 true,
6749 false,
6750 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6751 .unwrap(),
6752 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6753 .unwrap(),
6754 false,
6755 None,
6756 )
6757 .unwrap(),
6758 cx
6759 )
6760 .await
6761 .unwrap(),
6762 HashMap::from_iter([
6763 (path!("dir/one.ts").to_string(), vec![14..18]),
6764 (path!("dir/two.ts").to_string(), vec![14..18]),
6765 ]),
6766 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6767 );
6768}
6769
6770#[gpui::test]
6771async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6772 init_test(cx);
6773
6774 let fs = FakeFs::new(cx.executor());
6775 fs.insert_tree(
6776 path!("/worktree-a"),
6777 json!({
6778 "haystack.rs": r#"// NEEDLE"#,
6779 "haystack.ts": r#"// NEEDLE"#,
6780 }),
6781 )
6782 .await;
6783 fs.insert_tree(
6784 path!("/worktree-b"),
6785 json!({
6786 "haystack.rs": r#"// NEEDLE"#,
6787 "haystack.ts": r#"// NEEDLE"#,
6788 }),
6789 )
6790 .await;
6791
6792 let path_style = PathStyle::local();
6793 let project = Project::test(
6794 fs.clone(),
6795 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6796 cx,
6797 )
6798 .await;
6799
6800 assert_eq!(
6801 search(
6802 &project,
6803 SearchQuery::text(
6804 "NEEDLE",
6805 false,
6806 true,
6807 false,
6808 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6809 Default::default(),
6810 true,
6811 None,
6812 )
6813 .unwrap(),
6814 cx
6815 )
6816 .await
6817 .unwrap(),
6818 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6819 "should only return results from included worktree"
6820 );
6821 assert_eq!(
6822 search(
6823 &project,
6824 SearchQuery::text(
6825 "NEEDLE",
6826 false,
6827 true,
6828 false,
6829 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6830 Default::default(),
6831 true,
6832 None,
6833 )
6834 .unwrap(),
6835 cx
6836 )
6837 .await
6838 .unwrap(),
6839 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6840 "should only return results from included worktree"
6841 );
6842
6843 assert_eq!(
6844 search(
6845 &project,
6846 SearchQuery::text(
6847 "NEEDLE",
6848 false,
6849 true,
6850 false,
6851 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6852 Default::default(),
6853 false,
6854 None,
6855 )
6856 .unwrap(),
6857 cx
6858 )
6859 .await
6860 .unwrap(),
6861 HashMap::from_iter([
6862 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6863 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6864 ]),
6865 "should return results from both worktrees"
6866 );
6867}
6868
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that project search skips gitignored paths by default, can be
    // asked to search inside them, and still honors inclusion/exclusion
    // filters when ignored directories are searched.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are ignored via the top-level .gitignore.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query: the 4th flag (search ignored entries) is `false`, so
    // only the non-ignored top-level package.json matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project per scenario — NOTE(review): presumably to start from a
    // clean worktree scan for each query; confirm if caching matters here.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query but with the ignored-entries flag turned on: every file,
    // ignored or not, should be searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion filters should still apply when searching inside
    // ignored directories: include only prettier's subtree, exclude TS files.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6993
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching for non-ASCII text: match ranges are byte offsets (each
    // Cyrillic letter is two bytes in UTF-8, so "привет" spans 12 bytes).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive query: stays a plain text query (asserted below) and
    // only matches the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive query over non-ASCII text: the engine represents it as
    // a regex query (asserted below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A trailing '.' in the query must be treated literally (not as a regex
    // wildcard), so only "ПРИВЕТ." in two.rs matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7076
7077#[gpui::test]
7078async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7079 init_test(cx);
7080
7081 let fs = FakeFs::new(cx.executor());
7082 fs.insert_tree(
7083 "/one/two",
7084 json!({
7085 "three": {
7086 "a.txt": "",
7087 "four": {}
7088 },
7089 "c.rs": ""
7090 }),
7091 )
7092 .await;
7093
7094 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7095 project
7096 .update(cx, |project, cx| {
7097 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7098 project.create_entry((id, rel_path("b..")), true, cx)
7099 })
7100 .await
7101 .unwrap()
7102 .into_included()
7103 .unwrap();
7104
7105 assert_eq!(
7106 fs.paths(true),
7107 vec![
7108 PathBuf::from(path!("/")),
7109 PathBuf::from(path!("/one")),
7110 PathBuf::from(path!("/one/two")),
7111 PathBuf::from(path!("/one/two/c.rs")),
7112 PathBuf::from(path!("/one/two/three")),
7113 PathBuf::from(path!("/one/two/three/a.txt")),
7114 PathBuf::from(path!("/one/two/three/b..")),
7115 PathBuf::from(path!("/one/two/three/four")),
7116 ]
7117 );
7118}
7119
7120#[gpui::test]
7121async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7122 init_test(cx);
7123
7124 let fs = FakeFs::new(cx.executor());
7125 fs.insert_tree(
7126 path!("/dir"),
7127 json!({
7128 "a.tsx": "a",
7129 }),
7130 )
7131 .await;
7132
7133 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7134
7135 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7136 language_registry.add(tsx_lang());
7137 let language_server_names = [
7138 "TypeScriptServer",
7139 "TailwindServer",
7140 "ESLintServer",
7141 "NoHoverCapabilitiesServer",
7142 ];
7143 let mut language_servers = [
7144 language_registry.register_fake_lsp(
7145 "tsx",
7146 FakeLspAdapter {
7147 name: language_server_names[0],
7148 capabilities: lsp::ServerCapabilities {
7149 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7150 ..lsp::ServerCapabilities::default()
7151 },
7152 ..FakeLspAdapter::default()
7153 },
7154 ),
7155 language_registry.register_fake_lsp(
7156 "tsx",
7157 FakeLspAdapter {
7158 name: language_server_names[1],
7159 capabilities: lsp::ServerCapabilities {
7160 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7161 ..lsp::ServerCapabilities::default()
7162 },
7163 ..FakeLspAdapter::default()
7164 },
7165 ),
7166 language_registry.register_fake_lsp(
7167 "tsx",
7168 FakeLspAdapter {
7169 name: language_server_names[2],
7170 capabilities: lsp::ServerCapabilities {
7171 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7172 ..lsp::ServerCapabilities::default()
7173 },
7174 ..FakeLspAdapter::default()
7175 },
7176 ),
7177 language_registry.register_fake_lsp(
7178 "tsx",
7179 FakeLspAdapter {
7180 name: language_server_names[3],
7181 capabilities: lsp::ServerCapabilities {
7182 hover_provider: None,
7183 ..lsp::ServerCapabilities::default()
7184 },
7185 ..FakeLspAdapter::default()
7186 },
7187 ),
7188 ];
7189
7190 let (buffer, _handle) = project
7191 .update(cx, |p, cx| {
7192 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7193 })
7194 .await
7195 .unwrap();
7196 cx.executor().run_until_parked();
7197
7198 let mut servers_with_hover_requests = HashMap::default();
7199 for i in 0..language_server_names.len() {
7200 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7201 panic!(
7202 "Failed to get language server #{i} with name {}",
7203 &language_server_names[i]
7204 )
7205 });
7206 let new_server_name = new_server.server.name();
7207 assert!(
7208 !servers_with_hover_requests.contains_key(&new_server_name),
7209 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7210 );
7211 match new_server_name.as_ref() {
7212 "TailwindServer" | "TypeScriptServer" => {
7213 servers_with_hover_requests.insert(
7214 new_server_name.clone(),
7215 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7216 move |_, _| {
7217 let name = new_server_name.clone();
7218 async move {
7219 Ok(Some(lsp::Hover {
7220 contents: lsp::HoverContents::Scalar(
7221 lsp::MarkedString::String(format!("{name} hover")),
7222 ),
7223 range: None,
7224 }))
7225 }
7226 },
7227 ),
7228 );
7229 }
7230 "ESLintServer" => {
7231 servers_with_hover_requests.insert(
7232 new_server_name,
7233 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7234 |_, _| async move { Ok(None) },
7235 ),
7236 );
7237 }
7238 "NoHoverCapabilitiesServer" => {
7239 let _never_handled = new_server
7240 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7241 panic!(
7242 "Should not call for hovers server with no corresponding capabilities"
7243 )
7244 });
7245 }
7246 unexpected => panic!("Unexpected server name: {unexpected}"),
7247 }
7248 }
7249
7250 let hover_task = project.update(cx, |project, cx| {
7251 project.hover(&buffer, Point::new(0, 0), cx)
7252 });
7253 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7254 |mut hover_request| async move {
7255 hover_request
7256 .next()
7257 .await
7258 .expect("All hover requests should have been triggered")
7259 },
7260 ))
7261 .await;
7262 assert_eq!(
7263 vec!["TailwindServer hover", "TypeScriptServer hover"],
7264 hover_task
7265 .await
7266 .into_iter()
7267 .flatten()
7268 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7269 .sorted()
7270 .collect::<Vec<_>>(),
7271 "Should receive hover responses from all related servers with hover capabilities"
7272 );
7273}
7274
7275#[gpui::test]
7276async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7277 init_test(cx);
7278
7279 let fs = FakeFs::new(cx.executor());
7280 fs.insert_tree(
7281 path!("/dir"),
7282 json!({
7283 "a.ts": "a",
7284 }),
7285 )
7286 .await;
7287
7288 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7289
7290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7291 language_registry.add(typescript_lang());
7292 let mut fake_language_servers = language_registry.register_fake_lsp(
7293 "TypeScript",
7294 FakeLspAdapter {
7295 capabilities: lsp::ServerCapabilities {
7296 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7297 ..lsp::ServerCapabilities::default()
7298 },
7299 ..FakeLspAdapter::default()
7300 },
7301 );
7302
7303 let (buffer, _handle) = project
7304 .update(cx, |p, cx| {
7305 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7306 })
7307 .await
7308 .unwrap();
7309 cx.executor().run_until_parked();
7310
7311 let fake_server = fake_language_servers
7312 .next()
7313 .await
7314 .expect("failed to get the language server");
7315
7316 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7317 move |_, _| async move {
7318 Ok(Some(lsp::Hover {
7319 contents: lsp::HoverContents::Array(vec![
7320 lsp::MarkedString::String("".to_string()),
7321 lsp::MarkedString::String(" ".to_string()),
7322 lsp::MarkedString::String("\n\n\n".to_string()),
7323 ]),
7324 range: None,
7325 }))
7326 },
7327 );
7328
7329 let hover_task = project.update(cx, |project, cx| {
7330 project.hover(&buffer, Point::new(0, 0), cx)
7331 });
7332 let () = request_handled
7333 .next()
7334 .await
7335 .expect("All hover requests should have been triggered");
7336 assert_eq!(
7337 Vec::<String>::new(),
7338 hover_task
7339 .await
7340 .into_iter()
7341 .flatten()
7342 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7343 .sorted()
7344 .collect::<Vec<_>>(),
7345 "Empty hover parts should be ignored"
7346 );
7347}
7348
7349#[gpui::test]
7350async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7351 init_test(cx);
7352
7353 let fs = FakeFs::new(cx.executor());
7354 fs.insert_tree(
7355 path!("/dir"),
7356 json!({
7357 "a.ts": "a",
7358 }),
7359 )
7360 .await;
7361
7362 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7363
7364 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7365 language_registry.add(typescript_lang());
7366 let mut fake_language_servers = language_registry.register_fake_lsp(
7367 "TypeScript",
7368 FakeLspAdapter {
7369 capabilities: lsp::ServerCapabilities {
7370 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7371 ..lsp::ServerCapabilities::default()
7372 },
7373 ..FakeLspAdapter::default()
7374 },
7375 );
7376
7377 let (buffer, _handle) = project
7378 .update(cx, |p, cx| {
7379 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7380 })
7381 .await
7382 .unwrap();
7383 cx.executor().run_until_parked();
7384
7385 let fake_server = fake_language_servers
7386 .next()
7387 .await
7388 .expect("failed to get the language server");
7389
7390 let mut request_handled = fake_server
7391 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7392 Ok(Some(vec![
7393 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7394 title: "organize imports".to_string(),
7395 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7396 ..lsp::CodeAction::default()
7397 }),
7398 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7399 title: "fix code".to_string(),
7400 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7401 ..lsp::CodeAction::default()
7402 }),
7403 ]))
7404 });
7405
7406 let code_actions_task = project.update(cx, |project, cx| {
7407 project.code_actions(
7408 &buffer,
7409 0..buffer.read(cx).len(),
7410 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7411 cx,
7412 )
7413 });
7414
7415 let () = request_handled
7416 .next()
7417 .await
7418 .expect("The code action request should have been triggered");
7419
7420 let code_actions = code_actions_task.await.unwrap().unwrap();
7421 assert_eq!(code_actions.len(), 1);
7422 assert_eq!(
7423 code_actions[0].lsp_action.action_kind(),
7424 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7425 );
7426}
7427
7428#[gpui::test]
7429async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7430 init_test(cx);
7431
7432 let fs = FakeFs::new(cx.executor());
7433 fs.insert_tree(
7434 path!("/dir"),
7435 json!({
7436 "a.tsx": "a",
7437 }),
7438 )
7439 .await;
7440
7441 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7442
7443 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7444 language_registry.add(tsx_lang());
7445 let language_server_names = [
7446 "TypeScriptServer",
7447 "TailwindServer",
7448 "ESLintServer",
7449 "NoActionsCapabilitiesServer",
7450 ];
7451
7452 let mut language_server_rxs = [
7453 language_registry.register_fake_lsp(
7454 "tsx",
7455 FakeLspAdapter {
7456 name: language_server_names[0],
7457 capabilities: lsp::ServerCapabilities {
7458 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7459 ..lsp::ServerCapabilities::default()
7460 },
7461 ..FakeLspAdapter::default()
7462 },
7463 ),
7464 language_registry.register_fake_lsp(
7465 "tsx",
7466 FakeLspAdapter {
7467 name: language_server_names[1],
7468 capabilities: lsp::ServerCapabilities {
7469 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7470 ..lsp::ServerCapabilities::default()
7471 },
7472 ..FakeLspAdapter::default()
7473 },
7474 ),
7475 language_registry.register_fake_lsp(
7476 "tsx",
7477 FakeLspAdapter {
7478 name: language_server_names[2],
7479 capabilities: lsp::ServerCapabilities {
7480 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7481 ..lsp::ServerCapabilities::default()
7482 },
7483 ..FakeLspAdapter::default()
7484 },
7485 ),
7486 language_registry.register_fake_lsp(
7487 "tsx",
7488 FakeLspAdapter {
7489 name: language_server_names[3],
7490 capabilities: lsp::ServerCapabilities {
7491 code_action_provider: None,
7492 ..lsp::ServerCapabilities::default()
7493 },
7494 ..FakeLspAdapter::default()
7495 },
7496 ),
7497 ];
7498
7499 let (buffer, _handle) = project
7500 .update(cx, |p, cx| {
7501 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7502 })
7503 .await
7504 .unwrap();
7505 cx.executor().run_until_parked();
7506
7507 let mut servers_with_actions_requests = HashMap::default();
7508 for i in 0..language_server_names.len() {
7509 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7510 panic!(
7511 "Failed to get language server #{i} with name {}",
7512 &language_server_names[i]
7513 )
7514 });
7515 let new_server_name = new_server.server.name();
7516
7517 assert!(
7518 !servers_with_actions_requests.contains_key(&new_server_name),
7519 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7520 );
7521 match new_server_name.0.as_ref() {
7522 "TailwindServer" | "TypeScriptServer" => {
7523 servers_with_actions_requests.insert(
7524 new_server_name.clone(),
7525 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7526 move |_, _| {
7527 let name = new_server_name.clone();
7528 async move {
7529 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7530 lsp::CodeAction {
7531 title: format!("{name} code action"),
7532 ..lsp::CodeAction::default()
7533 },
7534 )]))
7535 }
7536 },
7537 ),
7538 );
7539 }
7540 "ESLintServer" => {
7541 servers_with_actions_requests.insert(
7542 new_server_name,
7543 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7544 |_, _| async move { Ok(None) },
7545 ),
7546 );
7547 }
7548 "NoActionsCapabilitiesServer" => {
7549 let _never_handled = new_server
7550 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7551 panic!(
7552 "Should not call for code actions server with no corresponding capabilities"
7553 )
7554 });
7555 }
7556 unexpected => panic!("Unexpected server name: {unexpected}"),
7557 }
7558 }
7559
7560 let code_actions_task = project.update(cx, |project, cx| {
7561 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7562 });
7563
7564 // cx.run_until_parked();
7565 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7566 |mut code_actions_request| async move {
7567 code_actions_request
7568 .next()
7569 .await
7570 .expect("All code actions requests should have been triggered")
7571 },
7572 ))
7573 .await;
7574 assert_eq!(
7575 vec!["TailwindServer code action", "TypeScriptServer code action"],
7576 code_actions_task
7577 .await
7578 .unwrap()
7579 .unwrap()
7580 .into_iter()
7581 .map(|code_action| code_action.lsp_action.title().to_owned())
7582 .sorted()
7583 .collect::<Vec<_>>(),
7584 "Should receive code actions responses from all related servers with hover capabilities"
7585 );
7586}
7587
7588#[gpui::test]
7589async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7590 init_test(cx);
7591
7592 let fs = FakeFs::new(cx.executor());
7593 fs.insert_tree(
7594 "/dir",
7595 json!({
7596 "a.rs": "let a = 1;",
7597 "b.rs": "let b = 2;",
7598 "c.rs": "let c = 2;",
7599 }),
7600 )
7601 .await;
7602
7603 let project = Project::test(
7604 fs,
7605 [
7606 "/dir/a.rs".as_ref(),
7607 "/dir/b.rs".as_ref(),
7608 "/dir/c.rs".as_ref(),
7609 ],
7610 cx,
7611 )
7612 .await;
7613
7614 // check the initial state and get the worktrees
7615 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7616 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7617 assert_eq!(worktrees.len(), 3);
7618
7619 let worktree_a = worktrees[0].read(cx);
7620 let worktree_b = worktrees[1].read(cx);
7621 let worktree_c = worktrees[2].read(cx);
7622
7623 // check they start in the right order
7624 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7625 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7626 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7627
7628 (
7629 worktrees[0].clone(),
7630 worktrees[1].clone(),
7631 worktrees[2].clone(),
7632 )
7633 });
7634
7635 // move first worktree to after the second
7636 // [a, b, c] -> [b, a, c]
7637 project
7638 .update(cx, |project, cx| {
7639 let first = worktree_a.read(cx);
7640 let second = worktree_b.read(cx);
7641 project.move_worktree(first.id(), second.id(), cx)
7642 })
7643 .expect("moving first after second");
7644
7645 // check the state after moving
7646 project.update(cx, |project, cx| {
7647 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7648 assert_eq!(worktrees.len(), 3);
7649
7650 let first = worktrees[0].read(cx);
7651 let second = worktrees[1].read(cx);
7652 let third = worktrees[2].read(cx);
7653
7654 // check they are now in the right order
7655 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7656 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7657 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7658 });
7659
7660 // move the second worktree to before the first
7661 // [b, a, c] -> [a, b, c]
7662 project
7663 .update(cx, |project, cx| {
7664 let second = worktree_a.read(cx);
7665 let first = worktree_b.read(cx);
7666 project.move_worktree(first.id(), second.id(), cx)
7667 })
7668 .expect("moving second before first");
7669
7670 // check the state after moving
7671 project.update(cx, |project, cx| {
7672 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7673 assert_eq!(worktrees.len(), 3);
7674
7675 let first = worktrees[0].read(cx);
7676 let second = worktrees[1].read(cx);
7677 let third = worktrees[2].read(cx);
7678
7679 // check they are now in the right order
7680 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7681 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7682 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7683 });
7684
7685 // move the second worktree to after the third
7686 // [a, b, c] -> [a, c, b]
7687 project
7688 .update(cx, |project, cx| {
7689 let second = worktree_b.read(cx);
7690 let third = worktree_c.read(cx);
7691 project.move_worktree(second.id(), third.id(), cx)
7692 })
7693 .expect("moving second after third");
7694
7695 // check the state after moving
7696 project.update(cx, |project, cx| {
7697 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7698 assert_eq!(worktrees.len(), 3);
7699
7700 let first = worktrees[0].read(cx);
7701 let second = worktrees[1].read(cx);
7702 let third = worktrees[2].read(cx);
7703
7704 // check they are now in the right order
7705 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7706 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7707 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7708 });
7709
7710 // move the third worktree to before the second
7711 // [a, c, b] -> [a, b, c]
7712 project
7713 .update(cx, |project, cx| {
7714 let third = worktree_c.read(cx);
7715 let second = worktree_b.read(cx);
7716 project.move_worktree(third.id(), second.id(), cx)
7717 })
7718 .expect("moving third before second");
7719
7720 // check the state after moving
7721 project.update(cx, |project, cx| {
7722 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7723 assert_eq!(worktrees.len(), 3);
7724
7725 let first = worktrees[0].read(cx);
7726 let second = worktrees[1].read(cx);
7727 let third = worktrees[2].read(cx);
7728
7729 // check they are now in the right order
7730 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7731 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7732 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7733 });
7734
7735 // move the first worktree to after the third
7736 // [a, b, c] -> [b, c, a]
7737 project
7738 .update(cx, |project, cx| {
7739 let first = worktree_a.read(cx);
7740 let third = worktree_c.read(cx);
7741 project.move_worktree(first.id(), third.id(), cx)
7742 })
7743 .expect("moving first after third");
7744
7745 // check the state after moving
7746 project.update(cx, |project, cx| {
7747 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7748 assert_eq!(worktrees.len(), 3);
7749
7750 let first = worktrees[0].read(cx);
7751 let second = worktrees[1].read(cx);
7752 let third = worktrees[2].read(cx);
7753
7754 // check they are now in the right order
7755 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7756 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7757 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7758 });
7759
7760 // move the third worktree to before the first
7761 // [b, c, a] -> [a, b, c]
7762 project
7763 .update(cx, |project, cx| {
7764 let third = worktree_a.read(cx);
7765 let first = worktree_b.read(cx);
7766 project.move_worktree(third.id(), first.id(), cx)
7767 })
7768 .expect("moving third before first");
7769
7770 // check the state after moving
7771 project.update(cx, |project, cx| {
7772 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7773 assert_eq!(worktrees.len(), 3);
7774
7775 let first = worktrees[0].read(cx);
7776 let second = worktrees[1].read(cx);
7777 let third = worktrees[2].read(cx);
7778
7779 // check they are now in the right order
7780 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7781 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7782 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7783 });
7784}
7785
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff compares the buffer against the index
    // (staged) text, and that it updates when the index contents change.
    init_test(cx);

    // The version recorded in the git index for src/main.rs.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The version on disk (and thus in the buffer): one added comment line
    // and one modified line relative to the staged version.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Buffer vs. index: row 0 was added, row 2 was modified. Hunk ranges are
    // buffer row ranges.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so only the println! line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    // The diff should now reflect the new index: a single added hunk.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7879
// Verifies the "uncommitted" diff (working copy vs. HEAD) for a buffer:
// hunks that exist only in the working copy carry a secondary (unstaged)
// hunk, hunks already present in the index do not, and the diff reacts to
// HEAD/index changes and to deleted files.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of src/modification.rs: HEAD, index, and working copy.
    // Each differs from the previous one, so the uncommitted diff contains
    // both a staged change (the println edit) and an unstaged one (the
    // comment insertion).
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // src/deletion.rs exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) should pick up the registered
    // Rust language for syntax highlighting.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment insertion is not in the index, so it has a secondary
    // (unstaged) hunk; the println edit is already staged, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as a single deletion hunk; the deletion is not
    // yet staged, so the secondary hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (removing it from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but it no longer has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8063
// End-to-end test of staging and unstaging hunks through the uncommitted
// diff: the optimistic "pending" hunk states shown while the index write is
// in flight, the events emitted at each step, recovery when the index write
// fails, and two staging operations issued back-to-back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero"
    // and modifies "two" and "four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so each staging step's events can
    // be asserted in order below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also becomes optimistically pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8413
// Verifies that staging works correctly when filesystem events arrive late:
// staging a second hunk before the FS event for the first one is observed,
// and a third before the second event, must not clobber any pending state.
// The explicit seeds reproduce orderings that previously exposed bugs.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero"
    // and modifies "two" and "four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes complete but their FS notifications
    // are buffered until explicitly flushed.
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8607
// Randomized test: repeatedly stage/unstage random hunks (with random yields
// interleaved so index writes race with each other), then verify that once
// everything settles, each hunk's staged/unstaged state matches the last
// operation applied to it. Run count is controlled by the `OPERATIONS` env var.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, with every fifth line modified in the working copy, yielding
    // 6 modification hunks (asserted below). HEAD and index are identical.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation below
    // updates the corresponding entry's `secondary_status` to the pending
    // value we expect the diff to report.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage if it currently has a secondary (unstaged)
        // hunk, otherwise unstage it.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly yield so that operations overlap with in-flight index writes.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once quiescent, every pending state must have resolved to its final form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8727
8728#[gpui::test]
8729async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8730 init_test(cx);
8731
8732 let committed_contents = r#"
8733 fn main() {
8734 println!("hello from HEAD");
8735 }
8736 "#
8737 .unindent();
8738 let file_contents = r#"
8739 fn main() {
8740 println!("hello from the working copy");
8741 }
8742 "#
8743 .unindent();
8744
8745 let fs = FakeFs::new(cx.background_executor.clone());
8746 fs.insert_tree(
8747 "/dir",
8748 json!({
8749 ".git": {},
8750 "src": {
8751 "main.rs": file_contents,
8752 }
8753 }),
8754 )
8755 .await;
8756
8757 fs.set_head_for_repo(
8758 Path::new("/dir/.git"),
8759 &[("src/main.rs", committed_contents.clone())],
8760 "deadbeef",
8761 );
8762 fs.set_index_for_repo(
8763 Path::new("/dir/.git"),
8764 &[("src/main.rs", committed_contents.clone())],
8765 );
8766
8767 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8768
8769 let buffer = project
8770 .update(cx, |project, cx| {
8771 project.open_local_buffer("/dir/src/main.rs", cx)
8772 })
8773 .await
8774 .unwrap();
8775 let uncommitted_diff = project
8776 .update(cx, |project, cx| {
8777 project.open_uncommitted_diff(buffer.clone(), cx)
8778 })
8779 .await
8780 .unwrap();
8781
8782 cx.run_until_parked();
8783 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8784 let snapshot = buffer.read(cx).snapshot();
8785 assert_hunks(
8786 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8787 &snapshot,
8788 &uncommitted_diff.base_text_string(cx).unwrap(),
8789 &[(
8790 1..2,
8791 " println!(\"hello from HEAD\");\n",
8792 " println!(\"hello from the working copy\");\n",
8793 DiffHunkStatus {
8794 kind: DiffHunkStatusKind::Modified,
8795 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8796 },
8797 )],
8798 );
8799 });
8800}
8801
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must not rewrite the index entry's file
// mode — a file committed as 100755 (executable) must remain 100755 after
// its hunks are staged. Uses a real git repository and the `git` CLI.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + subprocesses below require parking the test executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it in the
    // working copy so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk in the file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged change altered the file mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry mode directly.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8886
8887#[gpui::test]
8888async fn test_repository_and_path_for_project_path(
8889 background_executor: BackgroundExecutor,
8890 cx: &mut gpui::TestAppContext,
8891) {
8892 init_test(cx);
8893 let fs = FakeFs::new(background_executor);
8894 fs.insert_tree(
8895 path!("/root"),
8896 json!({
8897 "c.txt": "",
8898 "dir1": {
8899 ".git": {},
8900 "deps": {
8901 "dep1": {
8902 ".git": {},
8903 "src": {
8904 "a.txt": ""
8905 }
8906 }
8907 },
8908 "src": {
8909 "b.txt": ""
8910 }
8911 },
8912 }),
8913 )
8914 .await;
8915
8916 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8917 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8918 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8919 project
8920 .update(cx, |project, cx| project.git_scans_complete(cx))
8921 .await;
8922 cx.run_until_parked();
8923
8924 project.read_with(cx, |project, cx| {
8925 let git_store = project.git_store().read(cx);
8926 let pairs = [
8927 ("c.txt", None),
8928 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8929 (
8930 "dir1/deps/dep1/src/a.txt",
8931 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8932 ),
8933 ];
8934 let expected = pairs
8935 .iter()
8936 .map(|(path, result)| {
8937 (
8938 path,
8939 result.map(|(repo, repo_path)| {
8940 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8941 }),
8942 )
8943 })
8944 .collect::<Vec<_>>();
8945 let actual = pairs
8946 .iter()
8947 .map(|(path, _)| {
8948 let project_path = (tree_id, rel_path(path)).into();
8949 let result = maybe!({
8950 let (repo, repo_path) =
8951 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8952 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8953 });
8954 (path, result)
8955 })
8956 .collect::<Vec<_>>();
8957 pretty_assertions::assert_eq!(expected, actual);
8958 });
8959
8960 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8961 .await
8962 .unwrap();
8963 cx.run_until_parked();
8964
8965 project.read_with(cx, |project, cx| {
8966 let git_store = project.git_store().read(cx);
8967 assert_eq!(
8968 git_store.repository_and_path_for_project_path(
8969 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8970 cx
8971 ),
8972 None
8973 );
8974 });
8975}
8976
8977#[gpui::test]
8978async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8979 init_test(cx);
8980 let fs = FakeFs::new(cx.background_executor.clone());
8981 let home = paths::home_dir();
8982 fs.insert_tree(
8983 home,
8984 json!({
8985 ".git": {},
8986 "project": {
8987 "a.txt": "A"
8988 },
8989 }),
8990 )
8991 .await;
8992
8993 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8994 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8995 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8996
8997 project
8998 .update(cx, |project, cx| project.git_scans_complete(cx))
8999 .await;
9000 tree.flush_fs_events(cx).await;
9001
9002 project.read_with(cx, |project, cx| {
9003 let containing = project
9004 .git_store()
9005 .read(cx)
9006 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9007 assert!(containing.is_none());
9008 });
9009
9010 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9011 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9012 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9013 project
9014 .update(cx, |project, cx| project.git_scans_complete(cx))
9015 .await;
9016 tree.flush_fs_events(cx).await;
9017
9018 project.read_with(cx, |project, cx| {
9019 let containing = project
9020 .git_store()
9021 .read(cx)
9022 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9023 assert_eq!(
9024 containing
9025 .unwrap()
9026 .0
9027 .read(cx)
9028 .work_directory_abs_path
9029 .as_ref(),
9030 home,
9031 );
9032 });
9033}
9034
// Exercises cached git status reporting against a real repository through a
// sequence of working-copy edits, commits, and deletions, checking the
// observed modified/untracked/deleted statuses after each change.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS and git subprocesses require parking the test executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // b.txt is intentionally never added: it should appear as untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file; it should join the list.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, then delete more files from
    // the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9164
// Verifies status "postprocessing": a file deleted in the index but present in
// HEAD and the working copy surfaces as a single combined `DA` (index-deleted /
// worktree-added) entry, and a nested git repository is excluded from the outer
// repository's status list.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow the executor to block on actual disk I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Drain pending FS notifications and wait for the initial git scan before
    // inspecting statuses.
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9228
9229#[track_caller]
9230/// We merge lhs into rhs.
9231fn merge_pending_ops_snapshots(
9232 source: Vec<pending_op::PendingOps>,
9233 mut target: Vec<pending_op::PendingOps>,
9234) -> Vec<pending_op::PendingOps> {
9235 for s_ops in source {
9236 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9237 if ops.repo_path == s_ops.repo_path {
9238 Some(idx)
9239 } else {
9240 None
9241 }
9242 }) {
9243 let t_ops = &mut target[idx];
9244 for s_op in s_ops.ops {
9245 if let Some(op_idx) = t_ops
9246 .ops
9247 .iter()
9248 .zip(0..)
9249 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9250 {
9251 let t_op = &mut t_ops.ops[op_idx];
9252 match (s_op.job_status, t_op.job_status) {
9253 (pending_op::JobStatus::Running, _) => {}
9254 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9255 (s_st, t_st) if s_st == t_st => {}
9256 _ => unreachable!(),
9257 }
9258 } else {
9259 t_ops.ops.push(s_op);
9260 }
9261 }
9262 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9263 } else {
9264 target.push(s_ops);
9265 }
9266 }
9267 target
9268}
9269
// Verifies that staging/unstaging a single entry records a pending op that
// transitions Running -> Finished, that sequential requests get sequential op
// ids, and that the full op history observed via `PendingOpsChanged` events
// matches the order of requests.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store into one
    // merged tree so the whole op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to be assigned sequentially starting from 1.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op is observed as Running
    // while the task is in flight, then Finished once it completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // While the task hasn't been awaited, the op must be Running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging; each request gets its own op id.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream contains all five ops, in order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last request was a stage, so the file ends up added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9430
// Verifies that when the same entry is staged twice in quick succession, the
// first (superseded) op ends up marked Skipped and only the second runs to
// completion (Finished).
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store so the
    // final op history can be asserted below.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached so the second request can be issued while
    // this one is still in flight.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path; awaited with a timeout so the
    // test fails fast if it never completes.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 ran to completion (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9536
// Verifies stage_all/unstage_all: pending ops are recorded per path, and after
// the final unstage_all both files return to Untracked status.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store into one
    // merged tree so the op history per path can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage
    // everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 is its stage (already staged when stage_all ran, so no
    // extra op), op 2 is the unstage from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 is the stage from stage_all, op 2 the unstage from
    // unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // Everything was unstaged at the end, so both files are Untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9665
// Verifies that when only a deep subfolder of a repository is opened as the
// project worktree, the repository is still discovered at its real root and
// statuses are reported using repo-root-relative paths.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-root-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the nested subfolder as the project worktree.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory is the repository root, above the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        // c.txt has no status entry (unmodified); e.txt is untracked.
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's statuses should be reflected after a rescan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9745
// Verifies that `merge_conflicts` tracks conflicted paths while a cherry-pick
// is in progress and clears once the cherry-pick is resolved and committed.
// Currently compiled out via the always-false `#[cfg(any())]` below.
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow the executor to block on actual disk I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a branch that capitalizes a.txt, then back on main make a
    // conflicting edit and cherry-pick the branch commit onto it.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git actually entered a conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once resolved, the conflict set should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9828
// Verifies that editing .gitignore updates both the worktree's ignored flags
// and the repository's statuses: a newly-ignored file loses its status, and a
// newly-unignored file can show up as staged.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Drain pending FS notifications and wait for the initial git scan.
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt shows as staged (Added in the index).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9896
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Verifies that renaming a repository's work directory updates
// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow the executor to block on actual disk I/O.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Statuses before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9978
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// End-to-end check of git status tracking against a real repository: initial
// statuses, edits, commits, resets, stashes, .gitignore changes, and renames
// of files and directories.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow the executor to block on actual disk I/O.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Drain pending FS notifications and wait for the initial git scan.
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed back to its committed state; b.txt was removed
        // from the index, making it untracked again; e.txt was edited.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new ignore
    // file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory containing an untracked file.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10203
// Verifies that filesystem churn inside a git-ignored directory (`target/`)
// does not produce spurious repository-update events, while the directly
// tracked-adjacent entries still get worktree entry updates. Runs against a
// real filesystem and a real git repository, hence `allow_parking`.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository-update event and every worktree entry update so
    // the assertions below can compare against the exact sequences produced.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test infrastructure noise, not
                        // part of the behavior under test.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored directory forces those ignored
    // entries to be scanned and included in the worktree below.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a flycheck-style build: create a nested dir inside the ignored
    // tree, drop a temp file into it, then remove the whole dir again,
    // letting every scan settle in between.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10365
10366// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10367// to different timings/ordering of events.
10368#[ignore]
10369#[gpui::test]
10370async fn test_odd_events_for_ignored_dirs(
10371 executor: BackgroundExecutor,
10372 cx: &mut gpui::TestAppContext,
10373) {
10374 init_test(cx);
10375 let fs = FakeFs::new(executor);
10376 fs.insert_tree(
10377 path!("/root"),
10378 json!({
10379 ".git": {},
10380 ".gitignore": "**/target/",
10381 "src": {
10382 "main.rs": "fn main() {}",
10383 },
10384 "target": {
10385 "debug": {
10386 "foo.txt": "foo",
10387 "deps": {}
10388 }
10389 }
10390 }),
10391 )
10392 .await;
10393 fs.set_head_and_index_for_repo(
10394 path!("/root/.git").as_ref(),
10395 &[
10396 (".gitignore", "**/target/".into()),
10397 ("src/main.rs", "fn main() {}".into()),
10398 ],
10399 );
10400
10401 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10402 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10403 let project_events = Arc::new(Mutex::new(Vec::new()));
10404 project.update(cx, |project, cx| {
10405 let repository_updates = repository_updates.clone();
10406 cx.subscribe(project.git_store(), move |_, _, e, _| {
10407 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10408 repository_updates.lock().push(e.clone());
10409 }
10410 })
10411 .detach();
10412 let project_events = project_events.clone();
10413 cx.subscribe_self(move |_, e, _| {
10414 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10415 project_events.lock().extend(
10416 updates
10417 .iter()
10418 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10419 .filter(|(path, _)| path != "fs-event-sentinel"),
10420 );
10421 }
10422 })
10423 .detach();
10424 });
10425
10426 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10427 tree.update(cx, |tree, cx| {
10428 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10429 })
10430 .await
10431 .unwrap();
10432 tree.flush_fs_events(cx).await;
10433 project
10434 .update(cx, |project, cx| project.git_scans_complete(cx))
10435 .await;
10436 cx.run_until_parked();
10437 tree.update(cx, |tree, _| {
10438 assert_eq!(
10439 tree.entries(true, 0)
10440 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10441 .collect::<Vec<_>>(),
10442 vec![
10443 (rel_path(""), false),
10444 (rel_path(".gitignore"), false),
10445 (rel_path("src"), false),
10446 (rel_path("src/main.rs"), false),
10447 (rel_path("target"), true),
10448 (rel_path("target/debug"), true),
10449 (rel_path("target/debug/deps"), true),
10450 (rel_path("target/debug/foo.txt"), true),
10451 ]
10452 );
10453 });
10454
10455 assert_eq!(
10456 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10457 vec![
10458 RepositoryEvent::MergeHeadsChanged,
10459 RepositoryEvent::BranchChanged,
10460 RepositoryEvent::StatusesChanged,
10461 RepositoryEvent::StatusesChanged,
10462 ],
10463 "Initial worktree scan should produce a repo update event"
10464 );
10465 assert_eq!(
10466 project_events.lock().drain(..).collect::<Vec<_>>(),
10467 vec![
10468 ("target".to_string(), PathChange::Loaded),
10469 ("target/debug".to_string(), PathChange::Loaded),
10470 ("target/debug/deps".to_string(), PathChange::Loaded),
10471 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10472 ],
10473 "All non-ignored entries and all opened firs should be getting a project event",
10474 );
10475
10476 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10477 // This may happen multiple times during a single flycheck, but once is enough for testing.
10478 fs.emit_fs_event("/root/target/debug/deps", None);
10479 tree.flush_fs_events(cx).await;
10480 project
10481 .update(cx, |project, cx| project.git_scans_complete(cx))
10482 .await;
10483 cx.executor().run_until_parked();
10484
10485 assert_eq!(
10486 repository_updates
10487 .lock()
10488 .iter()
10489 .cloned()
10490 .collect::<Vec<_>>(),
10491 Vec::new(),
10492 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10493 );
10494 assert_eq!(
10495 project_events.lock().as_slice(),
10496 Vec::new(),
10497 "No further project events should happen, as only ignored dirs received FS events",
10498 );
10499}
10500
// Verifies that a repository discovered via a visible worktree is reported by
// `Project::repositories`, and that adding an invisible (non-visible)
// single-file worktree afterwards does not surface additional repositories.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // The project only opens the nested `dep1` repo; the outer `dir1` repo
    // lies outside the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Adding `b.txt` as a non-visible worktree would place it under the outer
    // `dir1` repository, which must still not be reported.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10562
// Verifies git status and ignored-state tracking across a rescan: files
// ignored by an ancestor `.gitignore`, files ignored inside the repo, and
// newly created files (tracked, ancestor-ignored, and ignored) must all end
// up with the expected status / `is_ignored` combination.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so even `.git` shows up as a worktree
    // entry and can be asserted on below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be scanned so they can be
    // asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files of each flavor: one tracked (added to the index), one
    // matching the ancestor .gitignore, and one inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10703
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/...`) and submodules (`.git` file pointing at
// `.git/modules/...`) are each discovered as their own repository, and that
// git-state changes in them refresh buffer statuses correctly.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — should be
    // discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository; the
    // barrier ensures pending status updates have been processed before the
    // status assertion.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10859
// Verifies that when two sibling worktrees live inside the same git
// repository, the repository is reported only once (deduplicated by its
// work directory).
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees are children of the same repository root.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10906
// Verifies that when a buffer is saved under a new path, its unstaged and
// uncommitted diffs re-base against the *new* path's index/HEAD contents
// (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct committed/staged contents per file so the assertions can tell
    // exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11020
11021async fn search(
11022 project: &Entity<Project>,
11023 query: SearchQuery,
11024 cx: &mut gpui::TestAppContext,
11025) -> Result<HashMap<String, Vec<Range<usize>>>> {
11026 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11027 let mut results = HashMap::default();
11028 while let Ok(search_result) = search_rx.rx.recv().await {
11029 match search_result {
11030 SearchResult::Buffer { buffer, ranges } => {
11031 results.entry(buffer).or_insert(ranges);
11032 }
11033 SearchResult::LimitReached => {}
11034 }
11035 }
11036 Ok(results
11037 .into_iter()
11038 .map(|(buffer, ranges)| {
11039 buffer.update(cx, |buffer, cx| {
11040 let path = buffer
11041 .file()
11042 .unwrap()
11043 .full_path(cx)
11044 .to_string_lossy()
11045 .to_string();
11046 let ranges = ranges
11047 .into_iter()
11048 .map(|range| range.to_offset(buffer))
11049 .collect::<Vec<_>>();
11050 (path, ranges)
11051 })
11052 })
11053 .collect())
11054}
11055
11056pub fn init_test(cx: &mut gpui::TestAppContext) {
11057 zlog::init_test();
11058
11059 cx.update(|cx| {
11060 let settings_store = SettingsStore::test(cx);
11061 cx.set_global(settings_store);
11062 release_channel::init(semver::Version::new(0, 0, 0), cx);
11063 });
11064}
11065
11066fn json_lang() -> Arc<Language> {
11067 Arc::new(Language::new(
11068 LanguageConfig {
11069 name: "JSON".into(),
11070 matcher: LanguageMatcher {
11071 path_suffixes: vec!["json".to_string()],
11072 ..Default::default()
11073 },
11074 ..Default::default()
11075 },
11076 None,
11077 ))
11078}
11079
11080fn js_lang() -> Arc<Language> {
11081 Arc::new(Language::new(
11082 LanguageConfig {
11083 name: "JavaScript".into(),
11084 matcher: LanguageMatcher {
11085 path_suffixes: vec!["js".to_string()],
11086 ..Default::default()
11087 },
11088 ..Default::default()
11089 },
11090 None,
11091 ))
11092}
11093
/// Builds a "Python" test language (no real parsing) whose toolchain lister
/// reports a `.venv` directory found in any ancestor of the queried subroot,
/// checked against the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Check `<worktree_root>/<ancestor>/.venv` against the fake fs.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Toolchain resolution is intentionally unsupported in this stub.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11168
11169fn typescript_lang() -> Arc<Language> {
11170 Arc::new(Language::new(
11171 LanguageConfig {
11172 name: "TypeScript".into(),
11173 matcher: LanguageMatcher {
11174 path_suffixes: vec!["ts".to_string()],
11175 ..Default::default()
11176 },
11177 ..Default::default()
11178 },
11179 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11180 ))
11181}
11182
11183fn tsx_lang() -> Arc<Language> {
11184 Arc::new(Language::new(
11185 LanguageConfig {
11186 name: "tsx".into(),
11187 matcher: LanguageMatcher {
11188 path_suffixes: vec!["tsx".to_string()],
11189 ..Default::default()
11190 },
11191 ..Default::default()
11192 },
11193 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11194 ))
11195}
11196
11197fn get_all_tasks(
11198 project: &Entity<Project>,
11199 task_contexts: Arc<TaskContexts>,
11200 cx: &mut App,
11201) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11202 let new_tasks = project.update(cx, |project, cx| {
11203 project.task_store().update(cx, |task_store, cx| {
11204 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11205 this.used_and_current_resolved_tasks(task_contexts, cx)
11206 })
11207 })
11208 });
11209
11210 cx.background_spawn(async move {
11211 let (mut old, new) = new_tasks.await;
11212 old.extend(new);
11213 old
11214 })
11215}
11216
11217#[track_caller]
11218fn assert_entry_git_state(
11219 tree: &Worktree,
11220 repository: &Repository,
11221 path: &str,
11222 index_status: Option<StatusCode>,
11223 is_ignored: bool,
11224) {
11225 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11226 let entry = tree
11227 .entry_for_path(&rel_path(path))
11228 .unwrap_or_else(|| panic!("entry {path} not found"));
11229 let status = repository
11230 .status_for_path(&repo_path(path))
11231 .map(|entry| entry.status);
11232 let expected = index_status.map(|index_status| {
11233 TrackedStatus {
11234 index_status,
11235 worktree_status: StatusCode::Unmodified,
11236 }
11237 .into()
11238 });
11239 assert_eq!(
11240 status, expected,
11241 "expected {path} to have git status: {expected:?}"
11242 );
11243 assert_eq!(
11244 entry.is_ignored, is_ignored,
11245 "expected {path} to have is_ignored: {is_ignored}"
11246 );
11247}
11248
11249#[track_caller]
11250fn git_init(path: &Path) -> git2::Repository {
11251 let mut init_opts = RepositoryInitOptions::new();
11252 init_opts.initial_head("main");
11253 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11254}
11255
11256#[track_caller]
11257fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11258 let path = path.as_ref();
11259 let mut index = repo.index().expect("Failed to get index");
11260 index.add_path(path).expect("Failed to add file");
11261 index.write().expect("Failed to write index");
11262}
11263
11264#[track_caller]
11265fn git_remove_index(path: &Path, repo: &git2::Repository) {
11266 let mut index = repo.index().expect("Failed to get index");
11267 index.remove_path(path).expect("Failed to add file");
11268 index.write().expect("Failed to write index");
11269}
11270
11271#[track_caller]
11272fn git_commit(msg: &'static str, repo: &git2::Repository) {
11273 use git2::Signature;
11274
11275 let signature = Signature::now("test", "test@zed.dev").unwrap();
11276 let oid = repo.index().unwrap().write_tree().unwrap();
11277 let tree = repo.find_tree(oid).unwrap();
11278 if let Ok(head) = repo.head() {
11279 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11280
11281 let parent_commit = parent_obj.as_commit().unwrap();
11282
11283 repo.commit(
11284 Some("HEAD"),
11285 &signature,
11286 &signature,
11287 msg,
11288 &tree,
11289 &[parent_commit],
11290 )
11291 .expect("Failed to commit with parent");
11292 } else {
11293 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11294 .expect("Failed to commit");
11295 }
11296}
11297
/// Cherry-picks `commit` onto the current working tree with default options,
/// panicking on failure. (Currently compiled out via `cfg(any())`.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    let result = repo.cherrypick(commit, None);
    result.expect("Failed to cherrypick");
}
11303
11304#[track_caller]
11305fn git_stash(repo: &mut git2::Repository) {
11306 use git2::Signature;
11307
11308 let signature = Signature::now("test", "test@zed.dev").unwrap();
11309 repo.stash_save(&signature, "N/A", None)
11310 .expect("Failed to stash");
11311}
11312
11313#[track_caller]
11314fn git_reset(offset: usize, repo: &git2::Repository) {
11315 let head = repo.head().expect("Couldn't get repo head");
11316 let object = head.peel(git2::ObjectType::Commit).unwrap();
11317 let commit = object.as_commit().unwrap();
11318 let new_head = commit
11319 .parents()
11320 .inspect(|parnet| {
11321 parnet.message();
11322 })
11323 .nth(offset)
11324 .expect("Not enough history");
11325 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11326 .expect("Could not reset");
11327}
11328
/// Creates a new branch named `name` pointing at the current HEAD commit,
/// without checking it out. (Currently compiled out via `cfg(any())`.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste error: the panic message previously said
    // "Failed to commit" for a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11339
/// Points HEAD at the given refname and checks it out into the working tree,
/// panicking on failure. (Currently compiled out via `cfg(any())`.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11346
/// Collects the repository's current status entries into a map from path to
/// `git2::Status`. (Currently compiled out via `cfg(any())`.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
11356
// Verifies that `Project::find_project_path` resolves absolute paths to a
// (worktree_id, relative path) pair, choosing the worktree that contains the
// path, succeeding for not-yet-existing files inside a worktree, and failing
// for paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories, each opened as its own worktree below.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id so the assertions below can
    // check that lookups land in the *correct* worktree, not just any.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file: the relative path keeps its subdirectory component.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no such file exists yet
        // (e.g. for creating new files).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11440
// Verifies how the git store reacts to worktrees being removed:
// - removing a worktree nested inside another repo's worktree (`b/script`)
//   must not drop either repository;
// - removing a repository's own worktree drops that repository and the
//   active repository falls over to a remaining one;
// - removing the last repo-bearing worktree leaves no active repository.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories: `a` and `b`. `b/script` is opened as its own worktree
    // but lives inside `b`'s repository.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so we can remove them by name below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (`a` and `b`).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `b/script` worktree must keep both repositories,
    // since `b`'s own worktree still covers that repository.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing `a`'s worktree drops its repository; the active repository
    // should fall over to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repo-bearing worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11553
// Walks a diff hunk through the optimistic-staging state machine:
// unstaged (`HasSecondaryHunk`) -> optimistically pending
// (`SecondaryHunkRemovalPending`, shown before the git write completes) ->
// staged (`NoSecondaryHunk`) -> gone after the commit is simulated.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD/index say "two"; the working copy says "TWO" — one modified hunk.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // Ticking may observe the hunk either still unstaged or already
        // optimistically pending; anything else means optimistic staging
        // skipped the pending state.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight: the hunk must report the optimistic pending state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11698
11699#[gpui::test]
11700async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11701 init_test(cx);
11702
11703 // Configure read_only_files setting
11704 cx.update(|cx| {
11705 cx.update_global::<SettingsStore, _>(|store, cx| {
11706 store.update_user_settings(cx, |settings| {
11707 settings.project.worktree.read_only_files = Some(vec![
11708 "**/generated/**".to_string(),
11709 "**/*.gen.rs".to_string(),
11710 ]);
11711 });
11712 });
11713 });
11714
11715 let fs = FakeFs::new(cx.background_executor.clone());
11716 fs.insert_tree(
11717 path!("/root"),
11718 json!({
11719 "src": {
11720 "main.rs": "fn main() {}",
11721 "types.gen.rs": "// Generated file",
11722 },
11723 "generated": {
11724 "schema.rs": "// Auto-generated schema",
11725 }
11726 }),
11727 )
11728 .await;
11729
11730 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11731
11732 // Open a regular file - should be read-write
11733 let regular_buffer = project
11734 .update(cx, |project, cx| {
11735 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11736 })
11737 .await
11738 .unwrap();
11739
11740 regular_buffer.read_with(cx, |buffer, _| {
11741 assert!(!buffer.read_only(), "Regular file should not be read-only");
11742 });
11743
11744 // Open a file matching *.gen.rs pattern - should be read-only
11745 let gen_buffer = project
11746 .update(cx, |project, cx| {
11747 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11748 })
11749 .await
11750 .unwrap();
11751
11752 gen_buffer.read_with(cx, |buffer, _| {
11753 assert!(
11754 buffer.read_only(),
11755 "File matching *.gen.rs pattern should be read-only"
11756 );
11757 });
11758
11759 // Open a file in generated directory - should be read-only
11760 let generated_buffer = project
11761 .update(cx, |project, cx| {
11762 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11763 })
11764 .await
11765 .unwrap();
11766
11767 generated_buffer.read_with(cx, |buffer, _| {
11768 assert!(
11769 buffer.read_only(),
11770 "File in generated directory should be read-only"
11771 );
11772 });
11773}
11774
11775#[gpui::test]
11776async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11777 init_test(cx);
11778
11779 // Explicitly set read_only_files to empty (default behavior)
11780 cx.update(|cx| {
11781 cx.update_global::<SettingsStore, _>(|store, cx| {
11782 store.update_user_settings(cx, |settings| {
11783 settings.project.worktree.read_only_files = Some(vec![]);
11784 });
11785 });
11786 });
11787
11788 let fs = FakeFs::new(cx.background_executor.clone());
11789 fs.insert_tree(
11790 path!("/root"),
11791 json!({
11792 "src": {
11793 "main.rs": "fn main() {}",
11794 },
11795 "generated": {
11796 "schema.rs": "// Auto-generated schema",
11797 }
11798 }),
11799 )
11800 .await;
11801
11802 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11803
11804 // All files should be read-write when read_only_files is empty
11805 let main_buffer = project
11806 .update(cx, |project, cx| {
11807 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11808 })
11809 .await
11810 .unwrap();
11811
11812 main_buffer.read_with(cx, |buffer, _| {
11813 assert!(
11814 !buffer.read_only(),
11815 "Files should not be read-only when read_only_files is empty"
11816 );
11817 });
11818
11819 let generated_buffer = project
11820 .update(cx, |project, cx| {
11821 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11822 })
11823 .await
11824 .unwrap();
11825
11826 generated_buffer.read_with(cx, |buffer, _| {
11827 assert!(
11828 !buffer.read_only(),
11829 "Generated files should not be read-only when read_only_files is empty"
11830 );
11831 });
11832}
11833
11834#[gpui::test]
11835async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11836 init_test(cx);
11837
11838 // Configure to make lock files read-only
11839 cx.update(|cx| {
11840 cx.update_global::<SettingsStore, _>(|store, cx| {
11841 store.update_user_settings(cx, |settings| {
11842 settings.project.worktree.read_only_files = Some(vec![
11843 "**/*.lock".to_string(),
11844 "**/package-lock.json".to_string(),
11845 ]);
11846 });
11847 });
11848 });
11849
11850 let fs = FakeFs::new(cx.background_executor.clone());
11851 fs.insert_tree(
11852 path!("/root"),
11853 json!({
11854 "Cargo.lock": "# Lock file",
11855 "Cargo.toml": "[package]",
11856 "package-lock.json": "{}",
11857 "package.json": "{}",
11858 }),
11859 )
11860 .await;
11861
11862 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11863
11864 // Cargo.lock should be read-only
11865 let cargo_lock = project
11866 .update(cx, |project, cx| {
11867 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11868 })
11869 .await
11870 .unwrap();
11871
11872 cargo_lock.read_with(cx, |buffer, _| {
11873 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11874 });
11875
11876 // Cargo.toml should be read-write
11877 let cargo_toml = project
11878 .update(cx, |project, cx| {
11879 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11880 })
11881 .await
11882 .unwrap();
11883
11884 cargo_toml.read_with(cx, |buffer, _| {
11885 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11886 });
11887
11888 // package-lock.json should be read-only
11889 let package_lock = project
11890 .update(cx, |project, cx| {
11891 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11892 })
11893 .await
11894 .unwrap();
11895
11896 package_lock.read_with(cx, |buffer, _| {
11897 assert!(buffer.read_only(), "package-lock.json should be read-only");
11898 });
11899
11900 // package.json should be read-write
11901 let package_json = project
11902 .update(cx, |project, cx| {
11903 project.open_local_buffer(path!("/root/package.json"), cx)
11904 })
11905 .await
11906 .unwrap();
11907
11908 package_json.read_with(cx, |buffer, _| {
11909 assert!(!buffer.read_only(), "package.json should not be read-only");
11910 });
11911}
11912
11913mod disable_ai_settings_tests {
11914 use gpui::TestAppContext;
11915 use project::*;
11916 use settings::{Settings, SettingsStore};
11917
11918 #[gpui::test]
11919 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
11920 cx.update(|cx| {
11921 settings::init(cx);
11922
11923 // Test 1: Default is false (AI enabled)
11924 assert!(
11925 !DisableAiSettings::get_global(cx).disable_ai,
11926 "Default should allow AI"
11927 );
11928 });
11929
11930 let disable_true = serde_json::json!({
11931 "disable_ai": true
11932 })
11933 .to_string();
11934 let disable_false = serde_json::json!({
11935 "disable_ai": false
11936 })
11937 .to_string();
11938
11939 cx.update_global::<SettingsStore, _>(|store, cx| {
11940 store.set_user_settings(&disable_false, cx).unwrap();
11941 store.set_global_settings(&disable_true, cx).unwrap();
11942 });
11943 cx.update(|cx| {
11944 assert!(
11945 DisableAiSettings::get_global(cx).disable_ai,
11946 "Local false cannot override global true"
11947 );
11948 });
11949
11950 cx.update_global::<SettingsStore, _>(|store, cx| {
11951 store.set_global_settings(&disable_false, cx).unwrap();
11952 store.set_user_settings(&disable_true, cx).unwrap();
11953 });
11954
11955 cx.update(|cx| {
11956 assert!(
11957 DisableAiSettings::get_global(cx).disable_ai,
11958 "Local false cannot override global true"
11959 );
11960 });
11961 }
11962}