1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
45 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
46 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
47 ToolchainMetadata,
48 language_settings::{LanguageSettings, LanguageSettingsContent},
49 markdown_lang, rust_lang, tree_sitter_typescript,
50};
51use lsp::{
52 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
53 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
54 Uri, WillRenameFiles, notification::DidRenameFiles,
55};
56use parking_lot::Mutex;
57use paths::{config_dir, global_gitignore_path, tasks_file};
58use postage::stream::Stream as _;
59use pretty_assertions::{assert_eq, assert_matches};
60use project::{
61 Event, TaskContexts,
62 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
63 search::{SearchQuery, SearchResult},
64 task_store::{TaskSettingsLocation, TaskStore},
65 *,
66};
67use rand::{Rng as _, rngs::StdRng};
68use serde_json::json;
69use settings::SettingsStore;
70#[cfg(not(windows))]
71use std::os;
72use std::{
73 cell::RefCell,
74 env, mem,
75 num::NonZeroU32,
76 ops::Range,
77 path::{Path, PathBuf},
78 rc::Rc,
79 str::FromStr,
80 sync::{Arc, OnceLock, atomic},
81 task::Poll,
82 time::Duration,
83};
84use sum_tree::SumTree;
85use task::{ResolvedTask, ShellKind, TaskContext};
86use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
87use unindent::Unindent as _;
88use util::{
89 TryFutureExt as _, assert_set_eq, maybe, path,
90 paths::{PathMatcher, PathStyle},
91 rel_path::{RelPath, rel_path},
92 test::{TempTree, marked_text_offsets},
93 uri,
94};
95use worktree::WorktreeModelHandle as _;
96
97#[gpui::test]
98async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
99 cx.executor().allow_parking();
100
101 let (tx, mut rx) = futures::channel::mpsc::unbounded();
102 let _thread = std::thread::spawn(move || {
103 #[cfg(not(target_os = "windows"))]
104 std::fs::metadata("/tmp").unwrap();
105 #[cfg(target_os = "windows")]
106 std::fs::metadata("C:/Windows").unwrap();
107 std::thread::sleep(Duration::from_millis(1000));
108 tx.unbounded_send(1).unwrap();
109 });
110 rx.next().await.unwrap();
111}
112
113#[gpui::test]
114async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
115 cx.executor().allow_parking();
116
117 let io_task = smol::unblock(move || {
118 println!("sleeping on thread {:?}", std::thread::current().id());
119 std::thread::sleep(Duration::from_millis(10));
120 1
121 });
122
123 let task = cx.foreground_executor().spawn(async move {
124 io_task.await;
125 });
126
127 task.await;
128}
129
130#[gpui::test]
131async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
132 cx: &mut gpui::TestAppContext,
133) {
134 init_test(cx);
135
136 let fs = FakeFs::new(cx.executor());
137 fs.insert_tree(
138 path!("/root"),
139 json!({
140 "dir-project": {
141 "src": {
142 "main.rs": "fn main() {}"
143 }
144 },
145 "single-file.rs": "fn helper() {}"
146 }),
147 )
148 .await;
149
150 let project = Project::test(
151 fs,
152 [
153 Path::new(path!("/root/single-file.rs")),
154 Path::new(path!("/root/dir-project")),
155 ],
156 cx,
157 )
158 .await;
159
160 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
161 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
162
163 assert_eq!(
164 ordered_paths,
165 vec![
166 PathBuf::from(path!("/root/dir-project")),
167 PathBuf::from(path!("/root")),
168 ]
169 );
170}
171
172#[gpui::test]
173async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
174 cx: &mut gpui::TestAppContext,
175) {
176 init_test(cx);
177
178 let fs = FakeFs::new(cx.executor());
179 let project = Project::test(fs, [], cx).await;
180
181 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
182 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
183
184 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
185}
186
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    // A real on-disk tree is required here: symlink creation needs a real
    // filesystem, so this test uses RealFs rather than FakeFs.
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // "root_link" points at the whole tree; "finnochio" aliases "fennel"
    // from inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // 5 files: apple, date, endive, grape, and grape again via the
        // "finnochio" symlink alias.
        assert_eq!(tree.file_count(), 5);
        // Both paths must resolve to the same underlying inode, proving the
        // symlinked directory was traversed rather than duplicated.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
237
238#[gpui::test]
239async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
240 init_test(cx);
241
242 let dir = TempTree::new(json!({
243 ".editorconfig": r#"
244 root = true
245 [*.rs]
246 indent_style = tab
247 indent_size = 3
248 end_of_line = lf
249 insert_final_newline = true
250 trim_trailing_whitespace = true
251 max_line_length = 120
252 [*.js]
253 tab_width = 10
254 max_line_length = off
255 "#,
256 ".zed": {
257 "settings.json": r#"{
258 "tab_size": 8,
259 "hard_tabs": false,
260 "ensure_final_newline_on_save": false,
261 "remove_trailing_whitespace_on_save": false,
262 "preferred_line_length": 64,
263 "soft_wrap": "editor_width",
264 }"#,
265 },
266 "a.rs": "fn a() {\n A\n}",
267 "b": {
268 ".editorconfig": r#"
269 [*.rs]
270 indent_size = 2
271 max_line_length = off,
272 "#,
273 "b.rs": "fn b() {\n B\n}",
274 },
275 "c.js": "def c\n C\nend",
276 "d": {
277 ".editorconfig": r#"
278 [*.rs]
279 indent_size = 1
280 "#,
281 "d.rs": "fn d() {\n D\n}",
282 },
283 "README.json": "tabs are better\n",
284 }));
285
286 let path = dir.path();
287 let fs = FakeFs::new(cx.executor());
288 fs.insert_tree_from_real_fs(path, path).await;
289 let project = Project::test(fs, [path], cx).await;
290
291 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
292 language_registry.add(js_lang());
293 language_registry.add(json_lang());
294 language_registry.add(rust_lang());
295
296 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
297
298 cx.executor().run_until_parked();
299
300 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
301 let buffer = project
302 .update(cx, |project, cx| {
303 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
304 })
305 .await
306 .unwrap();
307 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
308 };
309
310 let settings_a = settings_for("a.rs", cx).await;
311 let settings_b = settings_for("b/b.rs", cx).await;
312 let settings_c = settings_for("c.js", cx).await;
313 let settings_d = settings_for("d/d.rs", cx).await;
314 let settings_readme = settings_for("README.json", cx).await;
315 // .editorconfig overrides .zed/settings
316 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
317 assert_eq!(settings_a.hard_tabs, true);
318 assert_eq!(settings_a.ensure_final_newline_on_save, true);
319 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
320 assert_eq!(settings_a.preferred_line_length, 120);
321
322 // .editorconfig in b/ overrides .editorconfig in root
323 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
324
325 // .editorconfig in subdirectory overrides .editorconfig in root
326 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
327
328 // "indent_size" is not set, so "tab_width" is used
329 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
330
331 // When max_line_length is "off", default to .zed/settings.json
332 assert_eq!(settings_b.preferred_line_length, 64);
333 assert_eq!(settings_c.preferred_line_length, 64);
334
335 // README.md should not be affected by .editorconfig's globe "*.rs"
336 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
337}
338
/// `.editorconfig` files OUTSIDE the worktree (in ancestor directories)
/// should contribute settings, with nearer configs taking precedence and
/// the worktree's internal config winning for files it matches.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two external configs (grandparent, parent) plus one internal config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // Only the innermost directory is opened as the worktree.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Opens `path` in the worktree and resolves its effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
393
394#[gpui::test]
395async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
396 init_test(cx);
397
398 let fs = FakeFs::new(cx.executor());
399 fs.insert_tree(
400 path!("/worktree"),
401 json!({
402 ".editorconfig": "[*]\nindent_size = 99\n",
403 "src": {
404 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
405 "file.rs": "fn main() {}",
406 }
407 }),
408 )
409 .await;
410
411 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
412
413 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
414 language_registry.add(rust_lang());
415
416 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
417
418 cx.executor().run_until_parked();
419
420 let buffer = project
421 .update(cx, |project, cx| {
422 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
423 })
424 .await
425 .unwrap();
426 cx.update(|cx| {
427 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
428 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
429 });
430}
431
432#[gpui::test]
433async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
434 init_test(cx);
435
436 let fs = FakeFs::new(cx.executor());
437 fs.insert_tree(
438 path!("/parent"),
439 json!({
440 ".editorconfig": "[*]\nindent_size = 99\n",
441 "worktree": {
442 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
443 "file.rs": "fn main() {}",
444 }
445 }),
446 )
447 .await;
448
449 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
450
451 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
452 language_registry.add(rust_lang());
453
454 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
455
456 cx.executor().run_until_parked();
457
458 let buffer = project
459 .update(cx, |project, cx| {
460 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
461 })
462 .await
463 .unwrap();
464
465 cx.update(|cx| {
466 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
467
468 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
469 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
470 });
471}
472
473#[gpui::test]
474async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
475 init_test(cx);
476
477 let fs = FakeFs::new(cx.executor());
478 fs.insert_tree(
479 path!("/grandparent"),
480 json!({
481 ".editorconfig": "[*]\nindent_size = 99\n",
482 "parent": {
483 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
484 "worktree": {
485 "file.rs": "fn main() {}",
486 }
487 }
488 }),
489 )
490 .await;
491
492 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
493
494 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
495 language_registry.add(rust_lang());
496
497 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
498
499 cx.executor().run_until_parked();
500
501 let buffer = project
502 .update(cx, |project, cx| {
503 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
504 })
505 .await
506 .unwrap();
507
508 cx.update(|cx| {
509 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
510
511 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
512 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
513 });
514}
515
/// Two sibling worktrees under one parent should both inherit settings
/// from the parent's shared external `.editorconfig`.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Each worktree has its own internal config (which is what makes
    // external-config discovery kick in), plus a shared parent config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
571
572#[gpui::test]
573async fn test_external_editorconfig_not_loaded_without_internal_config(
574 cx: &mut gpui::TestAppContext,
575) {
576 init_test(cx);
577
578 let fs = FakeFs::new(cx.executor());
579 fs.insert_tree(
580 path!("/parent"),
581 json!({
582 ".editorconfig": "[*]\nindent_size = 99\n",
583 "worktree": {
584 "file.rs": "fn main() {}",
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
591
592 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
593 language_registry.add(rust_lang());
594
595 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
596
597 cx.executor().run_until_parked();
598
599 let buffer = project
600 .update(cx, |project, cx| {
601 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
602 })
603 .await
604 .unwrap();
605
606 cx.update(|cx| {
607 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
608
609 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
610 // because without an internal .editorconfig, external configs are not loaded
611 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
612 });
613}
614
/// Editing an external (outside-the-worktree) `.editorconfig` on disk
/// must be picked up and refresh the settings of already-open buffers.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                // Empty internal config so the external one gets discovered.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should notice.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
678
/// Adding a new worktree to an existing project should trigger external
/// `.editorconfig` discovery for the new worktree, too.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only the existing worktree open.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the second worktree after the project is already running.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
745
/// Removing a worktree must drop its per-worktree editorconfig state,
/// any external configs it pulled in, and the watchers for them.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (worktree ids, external config paths,
        // watched paths) for assertions.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
801
/// When two worktrees share one external `.editorconfig`, removing one
/// worktree must keep the shared config (and its watcher) alive for the
/// other.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    // Drop only worktree_a; the shared external config must survive.
    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
894
/// Custom `git_hosting_providers` declared in `.zed/settings.json` should
/// be registered in the global provider registry, and unregistered again
/// when the setting is removed.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        // The provider named "foo" from project settings should now be registered.
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings; the custom provider should be dropped.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
959
/// Exercises project-specific settings and tasks together:
/// - per-directory `.zed/settings.json` values apply to buffers beneath them,
/// - each `.zed/tasks.json` contributes worktree-scoped tasks,
/// - global file-based tasks and recently-scheduled tasks affect the
///   resolved task list and its ordering.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the single worktree's (default) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies tasks that come from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // a/ inherits the root settings; b/ has its own `.zed/settings.json`.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    // Initially both worktree task files contribute one task each.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled, and register an
    // additional global (file-based) task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task now sorts first; the new global task
    // (with its env) appears last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1162
// Verifies that an invalid `.zed/tasks.json` (one using an unknown task
// variable) surfaces an `Event::Toast` that names the bad variable and links
// to the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we have a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped by the subscription below once the expected toast is observed.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast must carry a "local-tasks-" notification id, mention the
        // offending variable, and link to the tasks documentation page.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1220
// A worktree task referencing `$ZED_WORKTREE_ROOT` must only resolve when a
// worktree context supplying that variable is available; without it the task
// list stays empty.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // No active worktree context: `$ZED_WORKTREE_ROOT` cannot be substituted,
    // so the task must not resolve at all.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With an active worktree context that provides `WorktreeRoot`, the task
    // resolves and the variable is substituted into the command line.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1312
// Two Python subprojects in one worktree initially share a single "ty" server
// instance (same server id); activating a distinct toolchain for one of them
// must spawn a second, separate server instance for that subproject.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a Python project is rooted at the nearest
    // ancestor directory (within `depth`) containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own venv and manifest, both using
    // the "ty" language server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a should start exactly one "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance (id 0),
    // since no per-project toolchain has been selected yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b thanks to its `pyproject.toml`.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a toolchain for project-b, its buffer should be served
    // by a fresh server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1514
// End-to-end coverage of language-server lifecycle management: lazy language
// assignment, per-language open/change notifications, capability-driven buffer
// configuration, save fan-out to all servers, renames (including extension
// changes that move a buffer between servers and clear its diagnostics),
// version resets, server restarts, and close notifications.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Distinct completion triggers per server let us verify which server
    // configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is a close of the old uri followed by an open of
    // the new one, on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared by the cross-language
    // rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements spawn.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1917
1918#[gpui::test]
1919async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1920 init_test(cx);
1921
1922 let settings_json_contents = json!({
1923 "languages": {
1924 "Rust": {
1925 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1926 }
1927 },
1928 "lsp": {
1929 "my_fake_lsp": {
1930 "binary": {
1931 // file exists, so this is treated as a relative path
1932 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1933 }
1934 },
1935 "lsp_on_path": {
1936 "binary": {
1937 // file doesn't exist, so it will fall back on PATH env var
1938 "path": path!("lsp_on_path.exe").to_string(),
1939 }
1940 }
1941 },
1942 });
1943
1944 let fs = FakeFs::new(cx.executor());
1945 fs.insert_tree(
1946 path!("/the-root"),
1947 json!({
1948 ".zed": {
1949 "settings.json": settings_json_contents.to_string(),
1950 },
1951 ".relative_path": {
1952 "to": {
1953 "my_fake_lsp.exe": "",
1954 },
1955 },
1956 "src": {
1957 "main.rs": "",
1958 }
1959 }),
1960 )
1961 .await;
1962
1963 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1964 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1965 language_registry.add(rust_lang());
1966
1967 let mut my_fake_lsp = language_registry.register_fake_lsp(
1968 "Rust",
1969 FakeLspAdapter {
1970 name: "my_fake_lsp",
1971 ..Default::default()
1972 },
1973 );
1974 let mut lsp_on_path = language_registry.register_fake_lsp(
1975 "Rust",
1976 FakeLspAdapter {
1977 name: "lsp_on_path",
1978 ..Default::default()
1979 },
1980 );
1981
1982 cx.run_until_parked();
1983
1984 // Start the language server by opening a buffer with a compatible file extension.
1985 project
1986 .update(cx, |project, cx| {
1987 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1988 })
1989 .await
1990 .unwrap();
1991
1992 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1993 assert_eq!(
1994 lsp_path.to_string_lossy(),
1995 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1996 );
1997
1998 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1999 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2000}
2001
2002#[gpui::test]
2003async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2004 init_test(cx);
2005
2006 let settings_json_contents = json!({
2007 "languages": {
2008 "Rust": {
2009 "language_servers": ["tilde_lsp"]
2010 }
2011 },
2012 "lsp": {
2013 "tilde_lsp": {
2014 "binary": {
2015 "path": "~/.local/bin/rust-analyzer",
2016 }
2017 }
2018 },
2019 });
2020
2021 let fs = FakeFs::new(cx.executor());
2022 fs.insert_tree(
2023 path!("/root"),
2024 json!({
2025 ".zed": {
2026 "settings.json": settings_json_contents.to_string(),
2027 },
2028 "src": {
2029 "main.rs": "fn main() {}",
2030 }
2031 }),
2032 )
2033 .await;
2034
2035 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2036 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2037 language_registry.add(rust_lang());
2038
2039 let mut tilde_lsp = language_registry.register_fake_lsp(
2040 "Rust",
2041 FakeLspAdapter {
2042 name: "tilde_lsp",
2043 ..Default::default()
2044 },
2045 );
2046 cx.run_until_parked();
2047
2048 project
2049 .update(cx, |project, cx| {
2050 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2051 })
2052 .await
2053 .unwrap();
2054
2055 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2056 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2057 assert_eq!(
2058 lsp_path, expected_path,
2059 "Tilde path should expand to home directory"
2060 );
2061}
2062
// A filesystem Rescan event on a path a language server watches (via
// `workspace/didChangeWatchedFiles` registration) must be forwarded to that
// server as a `FileChangeType::CHANGED` event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Register a watcher for `Cargo.lock` and collect every file event the
    // server is subsequently notified about.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before the rescan is emitted.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces to the server as a single CHANGED event.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2153
2154#[gpui::test]
2155async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2156 init_test(cx);
2157
2158 let fs = FakeFs::new(cx.executor());
2159 fs.insert_tree(
2160 path!("/the-root"),
2161 json!({
2162 ".gitignore": "target\n",
2163 "Cargo.lock": "",
2164 "src": {
2165 "a.rs": "",
2166 "b.rs": "",
2167 },
2168 "target": {
2169 "x": {
2170 "out": {
2171 "x.rs": ""
2172 }
2173 },
2174 "y": {
2175 "out": {
2176 "y.rs": "",
2177 }
2178 },
2179 "z": {
2180 "out": {
2181 "z.rs": ""
2182 }
2183 }
2184 }
2185 }),
2186 )
2187 .await;
2188 fs.insert_tree(
2189 path!("/the-registry"),
2190 json!({
2191 "dep1": {
2192 "src": {
2193 "dep1.rs": "",
2194 }
2195 },
2196 "dep2": {
2197 "src": {
2198 "dep2.rs": "",
2199 }
2200 },
2201 }),
2202 )
2203 .await;
2204 fs.insert_tree(
2205 path!("/the/stdlib"),
2206 json!({
2207 "LICENSE": "",
2208 "src": {
2209 "string.rs": "",
2210 }
2211 }),
2212 )
2213 .await;
2214
2215 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2216 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2217 (project.languages().clone(), project.lsp_store())
2218 });
2219 language_registry.add(rust_lang());
2220 let mut fake_servers = language_registry.register_fake_lsp(
2221 "Rust",
2222 FakeLspAdapter {
2223 name: "the-language-server",
2224 ..Default::default()
2225 },
2226 );
2227
2228 cx.executor().run_until_parked();
2229
2230 // Start the language server by opening a buffer with a compatible file extension.
2231 project
2232 .update(cx, |project, cx| {
2233 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2234 })
2235 .await
2236 .unwrap();
2237
2238 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2239 project.update(cx, |project, cx| {
2240 let worktree = project.worktrees(cx).next().unwrap();
2241 assert_eq!(
2242 worktree
2243 .read(cx)
2244 .snapshot()
2245 .entries(true, 0)
2246 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2247 .collect::<Vec<_>>(),
2248 &[
2249 ("", false),
2250 (".gitignore", false),
2251 ("Cargo.lock", false),
2252 ("src", false),
2253 ("src/a.rs", false),
2254 ("src/b.rs", false),
2255 ("target", true),
2256 ]
2257 );
2258 });
2259
2260 let prev_read_dir_count = fs.read_dir_call_count();
2261
2262 let fake_server = fake_servers.next().await.unwrap();
2263 cx.executor().run_until_parked();
2264 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2265 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2266 id
2267 });
2268
2269 // Simulate jumping to a definition in a dependency outside of the worktree.
2270 let _out_of_worktree_buffer = project
2271 .update(cx, |project, cx| {
2272 project.open_local_buffer_via_lsp(
2273 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2274 server_id,
2275 cx,
2276 )
2277 })
2278 .await
2279 .unwrap();
2280
2281 // Keep track of the FS events reported to the language server.
2282 let file_changes = Arc::new(Mutex::new(Vec::new()));
2283 fake_server
2284 .request::<lsp::request::RegisterCapability>(
2285 lsp::RegistrationParams {
2286 registrations: vec![lsp::Registration {
2287 id: Default::default(),
2288 method: "workspace/didChangeWatchedFiles".to_string(),
2289 register_options: serde_json::to_value(
2290 lsp::DidChangeWatchedFilesRegistrationOptions {
2291 watchers: vec![
2292 lsp::FileSystemWatcher {
2293 glob_pattern: lsp::GlobPattern::String(
2294 path!("/the-root/Cargo.toml").to_string(),
2295 ),
2296 kind: None,
2297 },
2298 lsp::FileSystemWatcher {
2299 glob_pattern: lsp::GlobPattern::String(
2300 path!("/the-root/src/*.{rs,c}").to_string(),
2301 ),
2302 kind: None,
2303 },
2304 lsp::FileSystemWatcher {
2305 glob_pattern: lsp::GlobPattern::String(
2306 path!("/the-root/target/y/**/*.rs").to_string(),
2307 ),
2308 kind: None,
2309 },
2310 lsp::FileSystemWatcher {
2311 glob_pattern: lsp::GlobPattern::String(
2312 path!("/the/stdlib/src/**/*.rs").to_string(),
2313 ),
2314 kind: None,
2315 },
2316 lsp::FileSystemWatcher {
2317 glob_pattern: lsp::GlobPattern::String(
2318 path!("**/Cargo.lock").to_string(),
2319 ),
2320 kind: None,
2321 },
2322 ],
2323 },
2324 )
2325 .ok(),
2326 }],
2327 },
2328 DEFAULT_LSP_REQUEST_TIMEOUT,
2329 )
2330 .await
2331 .into_response()
2332 .unwrap();
2333 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2334 let file_changes = file_changes.clone();
2335 move |params, _| {
2336 let mut file_changes = file_changes.lock();
2337 file_changes.extend(params.changes);
2338 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2339 }
2340 });
2341
2342 cx.executor().run_until_parked();
2343 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2344 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2345
2346 let mut new_watched_paths = fs.watched_paths();
2347 new_watched_paths.retain(|path| {
2348 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2349 });
2350 assert_eq!(
2351 &new_watched_paths,
2352 &[
2353 Path::new(path!("/the-root")),
2354 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2355 Path::new(path!("/the/stdlib/src"))
2356 ]
2357 );
2358
2359 // Now the language server has asked us to watch an ignored directory path,
2360 // so we recursively load it.
2361 project.update(cx, |project, cx| {
2362 let worktree = project.visible_worktrees(cx).next().unwrap();
2363 assert_eq!(
2364 worktree
2365 .read(cx)
2366 .snapshot()
2367 .entries(true, 0)
2368 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2369 .collect::<Vec<_>>(),
2370 &[
2371 ("", false),
2372 (".gitignore", false),
2373 ("Cargo.lock", false),
2374 ("src", false),
2375 ("src/a.rs", false),
2376 ("src/b.rs", false),
2377 ("target", true),
2378 ("target/x", true),
2379 ("target/y", true),
2380 ("target/y/out", true),
2381 ("target/y/out/y.rs", true),
2382 ("target/z", true),
2383 ]
2384 );
2385 });
2386
2387 // Perform some file system mutations, two of which match the watched patterns,
2388 // and one of which does not.
2389 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2390 .await
2391 .unwrap();
2392 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2393 .await
2394 .unwrap();
2395 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2396 .await
2397 .unwrap();
2398 fs.create_file(
2399 path!("/the-root/target/x/out/x2.rs").as_ref(),
2400 Default::default(),
2401 )
2402 .await
2403 .unwrap();
2404 fs.create_file(
2405 path!("/the-root/target/y/out/y2.rs").as_ref(),
2406 Default::default(),
2407 )
2408 .await
2409 .unwrap();
2410 fs.save(
2411 path!("/the-root/Cargo.lock").as_ref(),
2412 &"".into(),
2413 Default::default(),
2414 )
2415 .await
2416 .unwrap();
2417 fs.save(
2418 path!("/the-stdlib/LICENSE").as_ref(),
2419 &"".into(),
2420 Default::default(),
2421 )
2422 .await
2423 .unwrap();
2424 fs.save(
2425 path!("/the/stdlib/src/string.rs").as_ref(),
2426 &"".into(),
2427 Default::default(),
2428 )
2429 .await
2430 .unwrap();
2431
2432 // The language server receives events for the FS mutations that match its watch patterns.
2433 cx.executor().run_until_parked();
2434 assert_eq!(
2435 &*file_changes.lock(),
2436 &[
2437 lsp::FileEvent {
2438 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2439 typ: lsp::FileChangeType::CHANGED,
2440 },
2441 lsp::FileEvent {
2442 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2443 typ: lsp::FileChangeType::DELETED,
2444 },
2445 lsp::FileEvent {
2446 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2447 typ: lsp::FileChangeType::CREATED,
2448 },
2449 lsp::FileEvent {
2450 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2451 typ: lsp::FileChangeType::CREATED,
2452 },
2453 lsp::FileEvent {
2454 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2455 typ: lsp::FileChangeType::CHANGED,
2456 },
2457 ]
2458 );
2459}
2460
2461#[gpui::test]
2462async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2463 init_test(cx);
2464
2465 let fs = FakeFs::new(cx.executor());
2466 fs.insert_tree(
2467 path!("/dir"),
2468 json!({
2469 "a.rs": "let a = 1;",
2470 "b.rs": "let b = 2;"
2471 }),
2472 )
2473 .await;
2474
2475 let project = Project::test(
2476 fs,
2477 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2478 cx,
2479 )
2480 .await;
2481 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2482
2483 let buffer_a = project
2484 .update(cx, |project, cx| {
2485 project.open_local_buffer(path!("/dir/a.rs"), cx)
2486 })
2487 .await
2488 .unwrap();
2489 let buffer_b = project
2490 .update(cx, |project, cx| {
2491 project.open_local_buffer(path!("/dir/b.rs"), cx)
2492 })
2493 .await
2494 .unwrap();
2495
2496 lsp_store.update(cx, |lsp_store, cx| {
2497 lsp_store
2498 .update_diagnostics(
2499 LanguageServerId(0),
2500 lsp::PublishDiagnosticsParams {
2501 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2502 version: None,
2503 diagnostics: vec![lsp::Diagnostic {
2504 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2505 severity: Some(lsp::DiagnosticSeverity::ERROR),
2506 message: "error 1".to_string(),
2507 ..Default::default()
2508 }],
2509 },
2510 None,
2511 DiagnosticSourceKind::Pushed,
2512 &[],
2513 cx,
2514 )
2515 .unwrap();
2516 lsp_store
2517 .update_diagnostics(
2518 LanguageServerId(0),
2519 lsp::PublishDiagnosticsParams {
2520 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2521 version: None,
2522 diagnostics: vec![lsp::Diagnostic {
2523 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2524 severity: Some(DiagnosticSeverity::WARNING),
2525 message: "error 2".to_string(),
2526 ..Default::default()
2527 }],
2528 },
2529 None,
2530 DiagnosticSourceKind::Pushed,
2531 &[],
2532 cx,
2533 )
2534 .unwrap();
2535 });
2536
2537 buffer_a.update(cx, |buffer, _| {
2538 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2539 assert_eq!(
2540 chunks
2541 .iter()
2542 .map(|(s, d)| (s.as_str(), *d))
2543 .collect::<Vec<_>>(),
2544 &[
2545 ("let ", None),
2546 ("a", Some(DiagnosticSeverity::ERROR)),
2547 (" = 1;", None),
2548 ]
2549 );
2550 });
2551 buffer_b.update(cx, |buffer, _| {
2552 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2553 assert_eq!(
2554 chunks
2555 .iter()
2556 .map(|(s, d)| (s.as_str(), *d))
2557 .collect::<Vec<_>>(),
2558 &[
2559 ("let ", None),
2560 ("b", Some(DiagnosticSeverity::WARNING)),
2561 (" = 2;", None),
2562 ]
2563 );
2564 });
2565}
2566
2567#[gpui::test]
2568async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2569 init_test(cx);
2570
2571 let fs = FakeFs::new(cx.executor());
2572 fs.insert_tree(
2573 path!("/root"),
2574 json!({
2575 "dir": {
2576 ".git": {
2577 "HEAD": "ref: refs/heads/main",
2578 },
2579 ".gitignore": "b.rs",
2580 "a.rs": "let a = 1;",
2581 "b.rs": "let b = 2;",
2582 },
2583 "other.rs": "let b = c;"
2584 }),
2585 )
2586 .await;
2587
2588 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2589 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2590 let (worktree, _) = project
2591 .update(cx, |project, cx| {
2592 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2593 })
2594 .await
2595 .unwrap();
2596 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2597
2598 let (worktree, _) = project
2599 .update(cx, |project, cx| {
2600 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2601 })
2602 .await
2603 .unwrap();
2604 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2605
2606 let server_id = LanguageServerId(0);
2607 lsp_store.update(cx, |lsp_store, cx| {
2608 lsp_store
2609 .update_diagnostics(
2610 server_id,
2611 lsp::PublishDiagnosticsParams {
2612 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2613 version: None,
2614 diagnostics: vec![lsp::Diagnostic {
2615 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2616 severity: Some(lsp::DiagnosticSeverity::ERROR),
2617 message: "unused variable 'b'".to_string(),
2618 ..Default::default()
2619 }],
2620 },
2621 None,
2622 DiagnosticSourceKind::Pushed,
2623 &[],
2624 cx,
2625 )
2626 .unwrap();
2627 lsp_store
2628 .update_diagnostics(
2629 server_id,
2630 lsp::PublishDiagnosticsParams {
2631 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2632 version: None,
2633 diagnostics: vec![lsp::Diagnostic {
2634 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2635 severity: Some(lsp::DiagnosticSeverity::ERROR),
2636 message: "unknown variable 'c'".to_string(),
2637 ..Default::default()
2638 }],
2639 },
2640 None,
2641 DiagnosticSourceKind::Pushed,
2642 &[],
2643 cx,
2644 )
2645 .unwrap();
2646 });
2647
2648 let main_ignored_buffer = project
2649 .update(cx, |project, cx| {
2650 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2651 })
2652 .await
2653 .unwrap();
2654 main_ignored_buffer.update(cx, |buffer, _| {
2655 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2656 assert_eq!(
2657 chunks
2658 .iter()
2659 .map(|(s, d)| (s.as_str(), *d))
2660 .collect::<Vec<_>>(),
2661 &[
2662 ("let ", None),
2663 ("b", Some(DiagnosticSeverity::ERROR)),
2664 (" = 2;", None),
2665 ],
2666 "Gigitnored buffers should still get in-buffer diagnostics",
2667 );
2668 });
2669 let other_buffer = project
2670 .update(cx, |project, cx| {
2671 project.open_buffer((other_worktree_id, rel_path("")), cx)
2672 })
2673 .await
2674 .unwrap();
2675 other_buffer.update(cx, |buffer, _| {
2676 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2677 assert_eq!(
2678 chunks
2679 .iter()
2680 .map(|(s, d)| (s.as_str(), *d))
2681 .collect::<Vec<_>>(),
2682 &[
2683 ("let b = ", None),
2684 ("c", Some(DiagnosticSeverity::ERROR)),
2685 (";", None),
2686 ],
2687 "Buffers from hidden projects should still get in-buffer diagnostics"
2688 );
2689 });
2690
2691 project.update(cx, |project, cx| {
2692 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2693 assert_eq!(
2694 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2695 vec![(
2696 ProjectPath {
2697 worktree_id: main_worktree_id,
2698 path: rel_path("b.rs").into(),
2699 },
2700 server_id,
2701 DiagnosticSummary {
2702 error_count: 1,
2703 warning_count: 0,
2704 }
2705 )]
2706 );
2707 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2708 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2709 });
2710}
2711
// Verifies the project-level event sequence emitted while a language server
// runs a disk-based diagnostics pass: server added, diagnostics started,
// per-path diagnostics updated, diagnostics finished — and that publishing
// empty diagnostics twice in a row yields only a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token is what lets the project classify this
    // server's progress notifications as a disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the adapter's token starts the disk-based pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is reflected in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event is pending: the second empty publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2847
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in flight does not leave the project stuck: the old server's
// unfinished progress is discarded, and the new server's progress lifecycle
// drives the started/finished events on its own.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The restart removes server 0 and adds server 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2949
// Verifies that diagnostics already published by a language server are
// cleared (both in the buffer and in the project summary) when that server
// is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Wait for the notification to be processed, then confirm the diagnostic
    // is visible in the buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3030
3031#[gpui::test]
3032async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3033 init_test(cx);
3034
3035 let fs = FakeFs::new(cx.executor());
3036 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3037
3038 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3039 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3040
3041 language_registry.add(rust_lang());
3042 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3043
3044 let (buffer, _handle) = project
3045 .update(cx, |project, cx| {
3046 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3047 })
3048 .await
3049 .unwrap();
3050
3051 // Before restarting the server, report diagnostics with an unknown buffer version.
3052 let fake_server = fake_servers.next().await.unwrap();
3053 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3054 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3055 version: Some(10000),
3056 diagnostics: Vec::new(),
3057 });
3058 cx.executor().run_until_parked();
3059 project.update(cx, |project, cx| {
3060 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3061 });
3062
3063 let mut fake_server = fake_servers.next().await.unwrap();
3064 let notification = fake_server
3065 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3066 .await
3067 .text_document;
3068 assert_eq!(notification.version, 0);
3069}
3070
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification only for work that the
// server marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start one non-cancellable piece of work; cancelling must NOT target it.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Start a second, cancellable piece of work under the progress token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable work's token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3142
// Verifies that toggling the per-language `enable_language_server` setting
// stops and starts exactly the affected servers: disabling Rust stops only
// the Rust server, and flipping Rust back on while disabling JavaScript (in
// a single settings update) restarts one and exits the other.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3260
3261#[gpui::test(iterations = 3)]
3262async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3263 init_test(cx);
3264
3265 let text = "
3266 fn a() { A }
3267 fn b() { BB }
3268 fn c() { CCC }
3269 "
3270 .unindent();
3271
3272 let fs = FakeFs::new(cx.executor());
3273 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3274
3275 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3276 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3277
3278 language_registry.add(rust_lang());
3279 let mut fake_servers = language_registry.register_fake_lsp(
3280 "Rust",
3281 FakeLspAdapter {
3282 disk_based_diagnostics_sources: vec!["disk".into()],
3283 ..Default::default()
3284 },
3285 );
3286
3287 let buffer = project
3288 .update(cx, |project, cx| {
3289 project.open_local_buffer(path!("/dir/a.rs"), cx)
3290 })
3291 .await
3292 .unwrap();
3293
3294 let _handle = project.update(cx, |project, cx| {
3295 project.register_buffer_with_language_servers(&buffer, cx)
3296 });
3297
3298 let mut fake_server = fake_servers.next().await.unwrap();
3299 let open_notification = fake_server
3300 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3301 .await;
3302
3303 // Edit the buffer, moving the content down
3304 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3305 let change_notification_1 = fake_server
3306 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3307 .await;
3308 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3309
3310 // Report some diagnostics for the initial version of the buffer
3311 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3312 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3313 version: Some(open_notification.text_document.version),
3314 diagnostics: vec![
3315 lsp::Diagnostic {
3316 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3317 severity: Some(DiagnosticSeverity::ERROR),
3318 message: "undefined variable 'A'".to_string(),
3319 source: Some("disk".to_string()),
3320 ..Default::default()
3321 },
3322 lsp::Diagnostic {
3323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3324 severity: Some(DiagnosticSeverity::ERROR),
3325 message: "undefined variable 'BB'".to_string(),
3326 source: Some("disk".to_string()),
3327 ..Default::default()
3328 },
3329 lsp::Diagnostic {
3330 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3331 severity: Some(DiagnosticSeverity::ERROR),
3332 source: Some("disk".to_string()),
3333 message: "undefined variable 'CCC'".to_string(),
3334 ..Default::default()
3335 },
3336 ],
3337 });
3338
3339 // The diagnostics have moved down since they were created.
3340 cx.executor().run_until_parked();
3341 buffer.update(cx, |buffer, _| {
3342 assert_eq!(
3343 buffer
3344 .snapshot()
3345 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3346 .collect::<Vec<_>>(),
3347 &[
3348 DiagnosticEntry {
3349 range: Point::new(3, 9)..Point::new(3, 11),
3350 diagnostic: Diagnostic {
3351 source: Some("disk".into()),
3352 severity: DiagnosticSeverity::ERROR,
3353 message: "undefined variable 'BB'".to_string(),
3354 is_disk_based: true,
3355 group_id: 1,
3356 is_primary: true,
3357 source_kind: DiagnosticSourceKind::Pushed,
3358 ..Diagnostic::default()
3359 },
3360 },
3361 DiagnosticEntry {
3362 range: Point::new(4, 9)..Point::new(4, 12),
3363 diagnostic: Diagnostic {
3364 source: Some("disk".into()),
3365 severity: DiagnosticSeverity::ERROR,
3366 message: "undefined variable 'CCC'".to_string(),
3367 is_disk_based: true,
3368 group_id: 2,
3369 is_primary: true,
3370 source_kind: DiagnosticSourceKind::Pushed,
3371 ..Diagnostic::default()
3372 }
3373 }
3374 ]
3375 );
3376 assert_eq!(
3377 chunks_with_diagnostics(buffer, 0..buffer.len()),
3378 [
3379 ("\n\nfn a() { ".to_string(), None),
3380 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3381 (" }\nfn b() { ".to_string(), None),
3382 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3383 (" }\nfn c() { ".to_string(), None),
3384 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3385 (" }\n".to_string(), None),
3386 ]
3387 );
3388 assert_eq!(
3389 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3390 [
3391 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3392 (" }\nfn c() { ".to_string(), None),
3393 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3394 ]
3395 );
3396 });
3397
3398 // Ensure overlapping diagnostics are highlighted correctly.
3399 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3400 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3401 version: Some(open_notification.text_document.version),
3402 diagnostics: vec![
3403 lsp::Diagnostic {
3404 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3405 severity: Some(DiagnosticSeverity::ERROR),
3406 message: "undefined variable 'A'".to_string(),
3407 source: Some("disk".to_string()),
3408 ..Default::default()
3409 },
3410 lsp::Diagnostic {
3411 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3412 severity: Some(DiagnosticSeverity::WARNING),
3413 message: "unreachable statement".to_string(),
3414 source: Some("disk".to_string()),
3415 ..Default::default()
3416 },
3417 ],
3418 });
3419
3420 cx.executor().run_until_parked();
3421 buffer.update(cx, |buffer, _| {
3422 assert_eq!(
3423 buffer
3424 .snapshot()
3425 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3426 .collect::<Vec<_>>(),
3427 &[
3428 DiagnosticEntry {
3429 range: Point::new(2, 9)..Point::new(2, 12),
3430 diagnostic: Diagnostic {
3431 source: Some("disk".into()),
3432 severity: DiagnosticSeverity::WARNING,
3433 message: "unreachable statement".to_string(),
3434 is_disk_based: true,
3435 group_id: 4,
3436 is_primary: true,
3437 source_kind: DiagnosticSourceKind::Pushed,
3438 ..Diagnostic::default()
3439 }
3440 },
3441 DiagnosticEntry {
3442 range: Point::new(2, 9)..Point::new(2, 10),
3443 diagnostic: Diagnostic {
3444 source: Some("disk".into()),
3445 severity: DiagnosticSeverity::ERROR,
3446 message: "undefined variable 'A'".to_string(),
3447 is_disk_based: true,
3448 group_id: 3,
3449 is_primary: true,
3450 source_kind: DiagnosticSourceKind::Pushed,
3451 ..Diagnostic::default()
3452 },
3453 }
3454 ]
3455 );
3456 assert_eq!(
3457 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3458 [
3459 ("fn a() { ".to_string(), None),
3460 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3461 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3462 ("\n".to_string(), None),
3463 ]
3464 );
3465 assert_eq!(
3466 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3467 [
3468 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3469 ("\n".to_string(), None),
3470 ]
3471 );
3472 });
3473
3474 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3475 // changes since the last save.
3476 buffer.update(cx, |buffer, cx| {
3477 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3478 buffer.edit(
3479 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3480 None,
3481 cx,
3482 );
3483 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3484 });
3485 let change_notification_2 = fake_server
3486 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3487 .await;
3488 assert!(
3489 change_notification_2.text_document.version > change_notification_1.text_document.version
3490 );
3491
3492 // Handle out-of-order diagnostics
3493 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3494 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3495 version: Some(change_notification_2.text_document.version),
3496 diagnostics: vec![
3497 lsp::Diagnostic {
3498 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3499 severity: Some(DiagnosticSeverity::ERROR),
3500 message: "undefined variable 'BB'".to_string(),
3501 source: Some("disk".to_string()),
3502 ..Default::default()
3503 },
3504 lsp::Diagnostic {
3505 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3506 severity: Some(DiagnosticSeverity::WARNING),
3507 message: "undefined variable 'A'".to_string(),
3508 source: Some("disk".to_string()),
3509 ..Default::default()
3510 },
3511 ],
3512 });
3513
3514 cx.executor().run_until_parked();
3515 buffer.update(cx, |buffer, _| {
3516 assert_eq!(
3517 buffer
3518 .snapshot()
3519 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3520 .collect::<Vec<_>>(),
3521 &[
3522 DiagnosticEntry {
3523 range: Point::new(2, 21)..Point::new(2, 22),
3524 diagnostic: Diagnostic {
3525 source: Some("disk".into()),
3526 severity: DiagnosticSeverity::WARNING,
3527 message: "undefined variable 'A'".to_string(),
3528 is_disk_based: true,
3529 group_id: 6,
3530 is_primary: true,
3531 source_kind: DiagnosticSourceKind::Pushed,
3532 ..Diagnostic::default()
3533 }
3534 },
3535 DiagnosticEntry {
3536 range: Point::new(3, 9)..Point::new(3, 14),
3537 diagnostic: Diagnostic {
3538 source: Some("disk".into()),
3539 severity: DiagnosticSeverity::ERROR,
3540 message: "undefined variable 'BB'".to_string(),
3541 is_disk_based: true,
3542 group_id: 5,
3543 is_primary: true,
3544 source_kind: DiagnosticSourceKind::Pushed,
3545 ..Diagnostic::default()
3546 },
3547 }
3548 ]
3549 );
3550 });
3551}
3552
3553#[gpui::test]
3554async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3555 init_test(cx);
3556
3557 let text = concat!(
3558 "let one = ;\n", //
3559 "let two = \n",
3560 "let three = 3;\n",
3561 );
3562
3563 let fs = FakeFs::new(cx.executor());
3564 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3565
3566 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3567 let buffer = project
3568 .update(cx, |project, cx| {
3569 project.open_local_buffer(path!("/dir/a.rs"), cx)
3570 })
3571 .await
3572 .unwrap();
3573
3574 project.update(cx, |project, cx| {
3575 project.lsp_store().update(cx, |lsp_store, cx| {
3576 lsp_store
3577 .update_diagnostic_entries(
3578 LanguageServerId(0),
3579 PathBuf::from(path!("/dir/a.rs")),
3580 None,
3581 None,
3582 vec![
3583 DiagnosticEntry {
3584 range: Unclipped(PointUtf16::new(0, 10))
3585 ..Unclipped(PointUtf16::new(0, 10)),
3586 diagnostic: Diagnostic {
3587 severity: DiagnosticSeverity::ERROR,
3588 message: "syntax error 1".to_string(),
3589 source_kind: DiagnosticSourceKind::Pushed,
3590 ..Diagnostic::default()
3591 },
3592 },
3593 DiagnosticEntry {
3594 range: Unclipped(PointUtf16::new(1, 10))
3595 ..Unclipped(PointUtf16::new(1, 10)),
3596 diagnostic: Diagnostic {
3597 severity: DiagnosticSeverity::ERROR,
3598 message: "syntax error 2".to_string(),
3599 source_kind: DiagnosticSourceKind::Pushed,
3600 ..Diagnostic::default()
3601 },
3602 },
3603 ],
3604 cx,
3605 )
3606 .unwrap();
3607 })
3608 });
3609
3610 // An empty range is extended forward to include the following character.
3611 // At the end of a line, an empty range is extended backward to include
3612 // the preceding character.
3613 buffer.update(cx, |buffer, _| {
3614 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3615 assert_eq!(
3616 chunks
3617 .iter()
3618 .map(|(s, d)| (s.as_str(), *d))
3619 .collect::<Vec<_>>(),
3620 &[
3621 ("let one = ", None),
3622 (";", Some(DiagnosticSeverity::ERROR)),
3623 ("\nlet two =", None),
3624 (" ", Some(DiagnosticSeverity::ERROR)),
3625 ("\nlet three = 3;\n", None)
3626 ]
3627 );
3628 });
3629}
3630
3631#[gpui::test]
3632async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3633 init_test(cx);
3634
3635 let fs = FakeFs::new(cx.executor());
3636 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3637 .await;
3638
3639 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3640 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3641
3642 lsp_store.update(cx, |lsp_store, cx| {
3643 lsp_store
3644 .update_diagnostic_entries(
3645 LanguageServerId(0),
3646 Path::new(path!("/dir/a.rs")).to_owned(),
3647 None,
3648 None,
3649 vec![DiagnosticEntry {
3650 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3651 diagnostic: Diagnostic {
3652 severity: DiagnosticSeverity::ERROR,
3653 is_primary: true,
3654 message: "syntax error a1".to_string(),
3655 source_kind: DiagnosticSourceKind::Pushed,
3656 ..Diagnostic::default()
3657 },
3658 }],
3659 cx,
3660 )
3661 .unwrap();
3662 lsp_store
3663 .update_diagnostic_entries(
3664 LanguageServerId(1),
3665 Path::new(path!("/dir/a.rs")).to_owned(),
3666 None,
3667 None,
3668 vec![DiagnosticEntry {
3669 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3670 diagnostic: Diagnostic {
3671 severity: DiagnosticSeverity::ERROR,
3672 is_primary: true,
3673 message: "syntax error b1".to_string(),
3674 source_kind: DiagnosticSourceKind::Pushed,
3675 ..Diagnostic::default()
3676 },
3677 }],
3678 cx,
3679 )
3680 .unwrap();
3681
3682 assert_eq!(
3683 lsp_store.diagnostic_summary(false, cx),
3684 DiagnosticSummary {
3685 error_count: 2,
3686 warning_count: 0,
3687 }
3688 );
3689 });
3690}
3691
3692#[gpui::test]
3693async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3694 cx: &mut gpui::TestAppContext,
3695) {
3696 init_test(cx);
3697
3698 let fs = FakeFs::new(cx.executor());
3699 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3700 .await;
3701
3702 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3703 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3704
3705 lsp_store.update(cx, |lsp_store, cx| {
3706 lsp_store
3707 .update_diagnostic_entries(
3708 LanguageServerId(0),
3709 Path::new(path!("/dir/a.rs")).to_owned(),
3710 None,
3711 None,
3712 vec![DiagnosticEntry {
3713 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3714 diagnostic: Diagnostic {
3715 severity: DiagnosticSeverity::ERROR,
3716 is_primary: true,
3717 message: "error in a".to_string(),
3718 source_kind: DiagnosticSourceKind::Pushed,
3719 ..Diagnostic::default()
3720 },
3721 }],
3722 cx,
3723 )
3724 .unwrap();
3725 lsp_store
3726 .update_diagnostic_entries(
3727 LanguageServerId(0),
3728 Path::new(path!("/dir/b.rs")).to_owned(),
3729 None,
3730 None,
3731 vec![DiagnosticEntry {
3732 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3733 diagnostic: Diagnostic {
3734 severity: DiagnosticSeverity::WARNING,
3735 is_primary: true,
3736 message: "warning in b".to_string(),
3737 source_kind: DiagnosticSourceKind::Pushed,
3738 ..Diagnostic::default()
3739 },
3740 }],
3741 cx,
3742 )
3743 .unwrap();
3744
3745 assert_eq!(
3746 lsp_store.diagnostic_summary(false, cx),
3747 DiagnosticSummary {
3748 error_count: 1,
3749 warning_count: 1,
3750 }
3751 );
3752 });
3753
3754 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3755 .await
3756 .unwrap();
3757 cx.executor().run_until_parked();
3758
3759 lsp_store.update(cx, |lsp_store, cx| {
3760 assert_eq!(
3761 lsp_store.diagnostic_summary(false, cx),
3762 DiagnosticSummary {
3763 error_count: 0,
3764 warning_count: 1,
3765 },
3766 );
3767 });
3768}
3769
/// Restarting a buffer's language servers should clear the diagnostics that
/// the old server instance published, resetting the summary to zero and
/// emitting a `DiagnosticsUpdated` event along the way.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish a single error from the first fake server instance.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    // The published error is reflected in the project-wide summary.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe to project events *before* triggering the restart so the
    // DiagnosticsUpdated notification it emits is captured.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all already-queued events without blocking (now_or_never returns
    // None once the stream would have to wait) and look for DiagnosticsUpdated.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // After the restart, the old server's diagnostics are gone.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3843
/// A buffer reload (triggered by the file changing on disk) should cause a
/// fresh document-diagnostics pull from a server that advertises
/// pull-diagnostics support.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many DocumentDiagnosticRequests the fake server has received.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise pull-diagnostics support so the client issues
            // textDocument/diagnostic requests.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Answer every pull with an empty full report, bumping the counter.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    // The pushed error shows up in the project summary.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The reload must have issued at least one additional diagnostic pull.
    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3951
/// Edits that a language server computed against an older document version
/// must be adjusted for buffer edits made after that version was sent, so
/// they still land in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Record the document version the server received when the buffer opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret edits the server computed against the *old* version; the
    // returned ranges are expressed against the current buffer content.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the user's interleaved
    // edits and the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4106
/// A language server may express a small change as a very large diff (e.g.
/// rust-analyzer's merge-imports action); `edits_from_lsp` should minimize it
/// down to the actual changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The giant diff collapses to the two edits that actually differ.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4217
4218#[gpui::test]
4219async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4220 cx: &mut gpui::TestAppContext,
4221) {
4222 init_test(cx);
4223
4224 let text = "Path()";
4225
4226 let fs = FakeFs::new(cx.executor());
4227 fs.insert_tree(
4228 path!("/dir"),
4229 json!({
4230 "a.rs": text
4231 }),
4232 )
4233 .await;
4234
4235 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4236 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4237 let buffer = project
4238 .update(cx, |project, cx| {
4239 project.open_local_buffer(path!("/dir/a.rs"), cx)
4240 })
4241 .await
4242 .unwrap();
4243
4244 // Simulate the language server sending us a pair of edits at the same location,
4245 // with an insertion following a replacement (which violates the LSP spec).
4246 let edits = lsp_store
4247 .update(cx, |lsp_store, cx| {
4248 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4249 &buffer,
4250 [
4251 lsp::TextEdit {
4252 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4253 new_text: "Path".into(),
4254 },
4255 lsp::TextEdit {
4256 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4257 new_text: "from path import Path\n\n\n".into(),
4258 },
4259 ],
4260 LanguageServerId(0),
4261 None,
4262 cx,
4263 )
4264 })
4265 .await
4266 .unwrap();
4267
4268 buffer.update(cx, |buffer, cx| {
4269 buffer.edit(edits, None, cx);
4270 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4271 });
4272}
4273
/// `edits_from_lsp` must tolerate malformed server output: edits out of
/// order, inverted ranges (end before start), and ranges pointing past the
/// end of the file.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position lies far beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Ranges were clipped/normalized and minimized to the real changes.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4380
4381fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4382 buffer: &Buffer,
4383 range: Range<T>,
4384) -> Vec<(String, Option<DiagnosticSeverity>)> {
4385 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4386 for chunk in buffer.snapshot().chunks(
4387 range,
4388 LanguageAwareStyling {
4389 tree_sitter: true,
4390 diagnostics: true,
4391 },
4392 ) {
4393 if chunks
4394 .last()
4395 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4396 {
4397 chunks.last_mut().unwrap().0.push_str(chunk.text);
4398 } else {
4399 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4400 }
4401 }
4402 chunks
4403}
4404
/// Going to a definition that lives outside the project should load the
/// target file into an invisible worktree, which is released once the last
/// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as the project root; a.rs is a sibling on disk.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a range inside a.rs, i.e.
    // outside the project's single-file worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an *invisible* worktree alongside the visible b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4505
// Completion resolution precedence: when an LSP completion item carries an
// explicit `textEdit`, its `new_text` and range must be used verbatim,
// taking precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the completion request before installing the handler; the fake
    // server queues the request until the handler consumes it below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the trailing "fqn" (last 3 characters).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and range must come from the `textEdit`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4589
// Completion resolution when items rely on LSP 3.17 completion-list defaults:
// the item carries no `textEdit` of its own, and the replacement range comes
// from `itemDefaults.editRange` instead.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request before installing the handler; the fake server
        // queues it until the handler consumes it below.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // The default range covers the trailing "fqn" (3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text; the range is taken from the
        // list-level default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label becomes the new text —
        // `insert_text` ("irrelevant") is ignored in this case.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4727
// Completion resolution when neither the item nor the list defaults provide a
// replacement range: the range must be inferred from the word adjacent to the
// completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    // NOTE(review): sibling completion tests register "." here;
                    // ":" looks accidental, but the requests below use
                    // DEFAULT_COMPLETION_CONTEXT so it appears not to matter —
                    // confirm.
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request before installing the handler; the fake server queues
    // it until the handler consumes it below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` is used, and the replacement range is inferred to cover
    // the word before the cursor ("fqn", 3 chars).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the new text, and the inferred range covers "cmp"
    // (the 3 characters preceding the in-string cursor).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4834
// Carriage returns in a completion's `insert_text` ("\r" and "\r\n") must be
// normalized to "\n" in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request before installing the handler; the fake server queues
    // it until the handler consumes it below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed "\r" and "\r\n" line endings in the insert text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4903
// End-to-end flow for a code action that carries a command rather than edits:
// the action is lazily resolved, its command is executed on the server, and
// the edits the server pushes back via `workspace/applyEdit` are captured in
// the resulting project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // `resolve_provider: true` means actions may be returned
                // without commands/edits and filled in via codeAction/resolve.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` is an opaque payload echoed back on resolve; the
                    // resolve handler below keys off its presence.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> editor request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5047
5048#[gpui::test]
5049async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5050 init_test(cx);
5051 let fs = FakeFs::new(cx.background_executor.clone());
5052 let expected_contents = "content";
5053 fs.as_fake()
5054 .insert_tree(
5055 "/root",
5056 json!({
5057 "test.txt": expected_contents
5058 }),
5059 )
5060 .await;
5061
5062 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5063
5064 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5065 let worktree = project.worktrees(cx).next().unwrap();
5066 let entry_id = worktree
5067 .read(cx)
5068 .entry_for_path(rel_path("test.txt"))
5069 .unwrap()
5070 .id;
5071 (worktree, entry_id)
5072 });
5073 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5074 let _result = project
5075 .update(cx, |project, cx| {
5076 project.rename_entry(
5077 entry_id,
5078 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5079 cx,
5080 )
5081 })
5082 .await
5083 .unwrap();
5084 worktree.read_with(cx, |worktree, _| {
5085 assert!(
5086 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5087 "Old file should have been removed"
5088 );
5089 assert!(
5090 worktree
5091 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5092 .is_some(),
5093 "Whole directory hierarchy and the new file should have been created"
5094 );
5095 });
5096 assert_eq!(
5097 worktree
5098 .update(cx, |worktree, cx| {
5099 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5100 })
5101 .await
5102 .unwrap()
5103 .text,
5104 expected_contents,
5105 "Moved file's contents should be preserved"
5106 );
5107
5108 let entry_id = worktree.read_with(cx, |worktree, _| {
5109 worktree
5110 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5111 .unwrap()
5112 .id
5113 });
5114
5115 let _result = project
5116 .update(cx, |project, cx| {
5117 project.rename_entry(
5118 entry_id,
5119 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5120 cx,
5121 )
5122 })
5123 .await
5124 .unwrap();
5125 worktree.read_with(cx, |worktree, _| {
5126 assert!(
5127 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5128 "First file should not reappear"
5129 );
5130 assert!(
5131 worktree
5132 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5133 .is_none(),
5134 "Old file should have been removed"
5135 );
5136 assert!(
5137 worktree
5138 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5139 .is_some(),
5140 "No error should have occurred after moving into existing directory"
5141 );
5142 });
5143 assert_eq!(
5144 worktree
5145 .update(cx, |worktree, cx| {
5146 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5147 })
5148 .await
5149 .unwrap()
5150 .text,
5151 expected_contents,
5152 "Moved file's contents should be preserved"
5153 );
5154}
5155
5156#[gpui::test(iterations = 10)]
5157async fn test_save_file(cx: &mut gpui::TestAppContext) {
5158 init_test(cx);
5159
5160 let fs = FakeFs::new(cx.executor());
5161 fs.insert_tree(
5162 path!("/dir"),
5163 json!({
5164 "file1": "the old contents",
5165 }),
5166 )
5167 .await;
5168
5169 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5170 let buffer = project
5171 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5172 .await
5173 .unwrap();
5174 buffer.update(cx, |buffer, cx| {
5175 assert_eq!(buffer.text(), "the old contents");
5176 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5177 });
5178
5179 project
5180 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5181 .await
5182 .unwrap();
5183
5184 let new_text = fs
5185 .load(Path::new(path!("/dir/file1")))
5186 .await
5187 .unwrap()
5188 .replace("\r\n", "\n");
5189 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5190}
5191
// Regression test: saving an untitled buffer under a name with a recognized
// extension (`.rs`) must start the matching language server and open the file
// in it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no path there is no language to match,
    // so no language server applies to it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the new language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5271
5272#[gpui::test(iterations = 30)]
5273async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5274 init_test(cx);
5275
5276 let fs = FakeFs::new(cx.executor());
5277 fs.insert_tree(
5278 path!("/dir"),
5279 json!({
5280 "file1": "the original contents",
5281 }),
5282 )
5283 .await;
5284
5285 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5286 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5287 let buffer = project
5288 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5289 .await
5290 .unwrap();
5291
5292 // Change the buffer's file on disk, and then wait for the file change
5293 // to be detected by the worktree, so that the buffer starts reloading.
5294 fs.save(
5295 path!("/dir/file1").as_ref(),
5296 &"the first contents".into(),
5297 Default::default(),
5298 )
5299 .await
5300 .unwrap();
5301 worktree.next_event(cx).await;
5302
5303 // Change the buffer's file again. Depending on the random seed, the
5304 // previous file change may still be in progress.
5305 fs.save(
5306 path!("/dir/file1").as_ref(),
5307 &"the second contents".into(),
5308 Default::default(),
5309 )
5310 .await
5311 .unwrap();
5312 worktree.next_event(cx).await;
5313
5314 cx.executor().run_until_parked();
5315 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5316 buffer.read_with(cx, |buffer, _| {
5317 assert_eq!(buffer.text(), on_disk_text);
5318 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5319 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5320 });
5321}
5322
5323#[gpui::test(iterations = 30)]
5324async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5325 init_test(cx);
5326
5327 let fs = FakeFs::new(cx.executor());
5328 fs.insert_tree(
5329 path!("/dir"),
5330 json!({
5331 "file1": "the original contents",
5332 }),
5333 )
5334 .await;
5335
5336 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5337 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5338 let buffer = project
5339 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5340 .await
5341 .unwrap();
5342
5343 // Change the buffer's file on disk, and then wait for the file change
5344 // to be detected by the worktree, so that the buffer starts reloading.
5345 fs.save(
5346 path!("/dir/file1").as_ref(),
5347 &"the first contents".into(),
5348 Default::default(),
5349 )
5350 .await
5351 .unwrap();
5352 worktree.next_event(cx).await;
5353
5354 cx.executor()
5355 .spawn(cx.executor().simulate_random_delay())
5356 .await;
5357
5358 // Perform a noop edit, causing the buffer's version to increase.
5359 buffer.update(cx, |buffer, cx| {
5360 buffer.edit([(0..0, " ")], None, cx);
5361 buffer.undo(cx);
5362 });
5363
5364 cx.executor().run_until_parked();
5365 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5366 buffer.read_with(cx, |buffer, _| {
5367 let buffer_text = buffer.text();
5368 if buffer_text == on_disk_text {
5369 assert!(
5370 !buffer.is_dirty() && !buffer.has_conflict(),
5371 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5372 );
5373 }
5374 // If the file change occurred while the buffer was processing the first
5375 // change, the buffer will be in a conflicting state.
5376 else {
5377 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5378 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5379 }
5380 });
5381}
5382
5383#[gpui::test]
5384async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5385 init_test(cx);
5386
5387 let fs = FakeFs::new(cx.executor());
5388 fs.insert_tree(
5389 path!("/dir"),
5390 json!({
5391 "file1": "the old contents",
5392 }),
5393 )
5394 .await;
5395
5396 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5397 let buffer = project
5398 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5399 .await
5400 .unwrap();
5401 buffer.update(cx, |buffer, cx| {
5402 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5403 });
5404
5405 project
5406 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5407 .await
5408 .unwrap();
5409
5410 let new_text = fs
5411 .load(Path::new(path!("/dir/file1")))
5412 .await
5413 .unwrap()
5414 .replace("\r\n", "\n");
5415 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5416}
5417
5418#[gpui::test]
5419async fn test_save_as(cx: &mut gpui::TestAppContext) {
5420 init_test(cx);
5421
5422 let fs = FakeFs::new(cx.executor());
5423 fs.insert_tree("/dir", json!({})).await;
5424
5425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5426
5427 let languages = project.update(cx, |project, _| project.languages().clone());
5428 languages.add(rust_lang());
5429
5430 let buffer = project.update(cx, |project, cx| {
5431 project.create_local_buffer("", None, false, cx)
5432 });
5433 buffer.update(cx, |buffer, cx| {
5434 buffer.edit([(0..0, "abc")], None, cx);
5435 assert!(buffer.is_dirty());
5436 assert!(!buffer.has_conflict());
5437 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5438 });
5439 project
5440 .update(cx, |project, cx| {
5441 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5442 let path = ProjectPath {
5443 worktree_id,
5444 path: rel_path("file1.rs").into(),
5445 };
5446 project.save_buffer_as(buffer.clone(), path, cx)
5447 })
5448 .await
5449 .unwrap();
5450 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5451
5452 cx.executor().run_until_parked();
5453 buffer.update(cx, |buffer, cx| {
5454 assert_eq!(
5455 buffer.file().unwrap().full_path(cx),
5456 Path::new("dir/file1.rs")
5457 );
5458 assert!(!buffer.is_dirty());
5459 assert!(!buffer.has_conflict());
5460 assert_eq!(buffer.language().unwrap().name(), "Rust");
5461 });
5462
5463 let opened_buffer = project
5464 .update(cx, |project, cx| {
5465 project.open_local_buffer("/dir/file1.rs", cx)
5466 })
5467 .await
5468 .unwrap();
5469 assert_eq!(opened_buffer, buffer);
5470}
5471
5472#[gpui::test]
5473async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5474 init_test(cx);
5475
5476 let fs = FakeFs::new(cx.executor());
5477 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5478
5479 fs.insert_tree(
5480 path!("/dir"),
5481 json!({
5482 "data_a.txt": "data about a"
5483 }),
5484 )
5485 .await;
5486
5487 let buffer = project
5488 .update(cx, |project, cx| {
5489 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5490 })
5491 .await
5492 .unwrap();
5493
5494 buffer.update(cx, |buffer, cx| {
5495 buffer.edit([(11..12, "b")], None, cx);
5496 });
5497
5498 // Save buffer's contents as a new file and confirm that the buffer's now
5499 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5500 // file associated with the buffer has now been updated to `data_b.txt`
5501 project
5502 .update(cx, |project, cx| {
5503 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5504 let new_path = ProjectPath {
5505 worktree_id,
5506 path: rel_path("data_b.txt").into(),
5507 };
5508
5509 project.save_buffer_as(buffer.clone(), new_path, cx)
5510 })
5511 .await
5512 .unwrap();
5513
5514 buffer.update(cx, |buffer, cx| {
5515 assert_eq!(
5516 buffer.file().unwrap().full_path(cx),
5517 Path::new("dir/data_b.txt")
5518 )
5519 });
5520
5521 // Open the original `data_a.txt` file, confirming that its contents are
5522 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5523 let original_buffer = project
5524 .update(cx, |project, cx| {
5525 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5526 })
5527 .await
5528 .unwrap();
5529
5530 original_buffer.update(cx, |buffer, cx| {
5531 assert_eq!(buffer.text(), "data about a");
5532 assert_eq!(
5533 buffer.file().unwrap().full_path(cx),
5534 Path::new("dir/data_a.txt")
5535 )
5536 });
5537}
5538
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // End-to-end check of worktree rescanning after on-disk renames/deletes:
    // entry ids must be stable across renames, open buffers must follow their
    // files (or report `DiskState::Deleted` for removed ones), and a remote
    // replica fed the buffered update stream must converge to the same paths.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real filesystem, so blocking waits are permitted.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path, panicking
    // if no entry exists.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before the renames so we can check stability later.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Buffer every update the local worktree emits so they can be replayed
    // into the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Renamed entries keep their original entry ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Buffers for renamed files now point at their new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while the deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5706
5707#[cfg(target_os = "linux")]
5708#[gpui::test(retries = 5)]
5709async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5710 init_test(cx);
5711 cx.executor().allow_parking();
5712
5713 let dir = TempTree::new(json!({}));
5714 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5715 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5716
5717 tree.flush_fs_events(cx).await;
5718
5719 let repro_dir = dir.path().join("repro");
5720 std::fs::create_dir(&repro_dir).unwrap();
5721 tree.flush_fs_events(cx).await;
5722
5723 cx.update(|cx| {
5724 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5725 });
5726
5727 std::fs::remove_dir_all(&repro_dir).unwrap();
5728 tree.flush_fs_events(cx).await;
5729
5730 cx.update(|cx| {
5731 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5732 });
5733
5734 std::fs::create_dir(&repro_dir).unwrap();
5735 tree.flush_fs_events(cx).await;
5736
5737 cx.update(|cx| {
5738 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5739 });
5740
5741 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5742 tree.flush_fs_events(cx).await;
5743
5744 cx.update(|cx| {
5745 assert!(
5746 tree.read(cx)
5747 .entry_for_path(rel_path("repro/repro-marker"))
5748 .is_some()
5749 );
5750 });
5751}
5752
5753#[gpui::test(iterations = 10)]
5754async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5755 init_test(cx);
5756
5757 let fs = FakeFs::new(cx.executor());
5758 fs.insert_tree(
5759 path!("/dir"),
5760 json!({
5761 "a": {
5762 "file1": "",
5763 }
5764 }),
5765 )
5766 .await;
5767
5768 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5769 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5770 let tree_id = tree.update(cx, |tree, _| tree.id());
5771
5772 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5773 project.update(cx, |project, cx| {
5774 let tree = project.worktrees(cx).next().unwrap();
5775 tree.read(cx)
5776 .entry_for_path(rel_path(path))
5777 .unwrap_or_else(|| panic!("no entry for path {}", path))
5778 .id
5779 })
5780 };
5781
5782 let dir_id = id_for_path("a", cx);
5783 let file_id = id_for_path("a/file1", cx);
5784 let buffer = project
5785 .update(cx, |p, cx| {
5786 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5787 })
5788 .await
5789 .unwrap();
5790 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5791
5792 project
5793 .update(cx, |project, cx| {
5794 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5795 })
5796 .unwrap()
5797 .await
5798 .into_included()
5799 .unwrap();
5800 cx.executor().run_until_parked();
5801
5802 assert_eq!(id_for_path("b", cx), dir_id);
5803 assert_eq!(id_for_path("b/file1", cx), file_id);
5804 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5805}
5806
5807#[gpui::test]
5808async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5809 init_test(cx);
5810
5811 let fs = FakeFs::new(cx.executor());
5812 fs.insert_tree(
5813 "/dir",
5814 json!({
5815 "a.txt": "a-contents",
5816 "b.txt": "b-contents",
5817 }),
5818 )
5819 .await;
5820
5821 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5822
5823 // Spawn multiple tasks to open paths, repeating some paths.
5824 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5825 (
5826 p.open_local_buffer("/dir/a.txt", cx),
5827 p.open_local_buffer("/dir/b.txt", cx),
5828 p.open_local_buffer("/dir/a.txt", cx),
5829 )
5830 });
5831
5832 let buffer_a_1 = buffer_a_1.await.unwrap();
5833 let buffer_a_2 = buffer_a_2.await.unwrap();
5834 let buffer_b = buffer_b.await.unwrap();
5835 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5836 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5837
5838 // There is only one buffer per path.
5839 let buffer_a_id = buffer_a_1.entity_id();
5840 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5841
5842 // Open the same path again while it is still open.
5843 drop(buffer_a_1);
5844 let buffer_a_3 = project
5845 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5846 .await
5847 .unwrap();
5848
5849 // There's still only one buffer per path.
5850 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5851}
5852
5853#[gpui::test]
5854async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5855 init_test(cx);
5856
5857 let fs = FakeFs::new(cx.executor());
5858 fs.insert_tree(
5859 path!("/dir"),
5860 json!({
5861 "file1": "abc",
5862 "file2": "def",
5863 "file3": "ghi",
5864 }),
5865 )
5866 .await;
5867
5868 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5869
5870 let buffer1 = project
5871 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5872 .await
5873 .unwrap();
5874 let events = Arc::new(Mutex::new(Vec::new()));
5875
5876 // initially, the buffer isn't dirty.
5877 buffer1.update(cx, |buffer, cx| {
5878 cx.subscribe(&buffer1, {
5879 let events = events.clone();
5880 move |_, _, event, _| match event {
5881 BufferEvent::Operation { .. } => {}
5882 _ => events.lock().push(event.clone()),
5883 }
5884 })
5885 .detach();
5886
5887 assert!(!buffer.is_dirty());
5888 assert!(events.lock().is_empty());
5889
5890 buffer.edit([(1..2, "")], None, cx);
5891 });
5892
5893 // after the first edit, the buffer is dirty, and emits a dirtied event.
5894 buffer1.update(cx, |buffer, cx| {
5895 assert!(buffer.text() == "ac");
5896 assert!(buffer.is_dirty());
5897 assert_eq!(
5898 *events.lock(),
5899 &[
5900 language::BufferEvent::Edited { is_local: true },
5901 language::BufferEvent::DirtyChanged
5902 ]
5903 );
5904 events.lock().clear();
5905 buffer.did_save(
5906 buffer.version(),
5907 buffer.file().unwrap().disk_state().mtime(),
5908 cx,
5909 );
5910 });
5911
5912 // after saving, the buffer is not dirty, and emits a saved event.
5913 buffer1.update(cx, |buffer, cx| {
5914 assert!(!buffer.is_dirty());
5915 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5916 events.lock().clear();
5917
5918 buffer.edit([(1..1, "B")], None, cx);
5919 buffer.edit([(2..2, "D")], None, cx);
5920 });
5921
5922 // after editing again, the buffer is dirty, and emits another dirty event.
5923 buffer1.update(cx, |buffer, cx| {
5924 assert!(buffer.text() == "aBDc");
5925 assert!(buffer.is_dirty());
5926 assert_eq!(
5927 *events.lock(),
5928 &[
5929 language::BufferEvent::Edited { is_local: true },
5930 language::BufferEvent::DirtyChanged,
5931 language::BufferEvent::Edited { is_local: true },
5932 ],
5933 );
5934 events.lock().clear();
5935
5936 // After restoring the buffer to its previously-saved state,
5937 // the buffer is not considered dirty anymore.
5938 buffer.edit([(1..3, "")], None, cx);
5939 assert!(buffer.text() == "ac");
5940 assert!(!buffer.is_dirty());
5941 });
5942
5943 assert_eq!(
5944 *events.lock(),
5945 &[
5946 language::BufferEvent::Edited { is_local: true },
5947 language::BufferEvent::DirtyChanged
5948 ]
5949 );
5950
5951 // When a file is deleted, it is not considered dirty.
5952 let events = Arc::new(Mutex::new(Vec::new()));
5953 let buffer2 = project
5954 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5955 .await
5956 .unwrap();
5957 buffer2.update(cx, |_, cx| {
5958 cx.subscribe(&buffer2, {
5959 let events = events.clone();
5960 move |_, _, event, _| match event {
5961 BufferEvent::Operation { .. } => {}
5962 _ => events.lock().push(event.clone()),
5963 }
5964 })
5965 .detach();
5966 });
5967
5968 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5969 .await
5970 .unwrap();
5971 cx.executor().run_until_parked();
5972 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5973 assert_eq!(
5974 mem::take(&mut *events.lock()),
5975 &[language::BufferEvent::FileHandleChanged]
5976 );
5977
5978 // Buffer becomes dirty when edited.
5979 buffer2.update(cx, |buffer, cx| {
5980 buffer.edit([(2..3, "")], None, cx);
5981 assert_eq!(buffer.is_dirty(), true);
5982 });
5983 assert_eq!(
5984 mem::take(&mut *events.lock()),
5985 &[
5986 language::BufferEvent::Edited { is_local: true },
5987 language::BufferEvent::DirtyChanged
5988 ]
5989 );
5990
5991 // Buffer becomes clean again when all of its content is removed, because
5992 // the file was deleted.
5993 buffer2.update(cx, |buffer, cx| {
5994 buffer.edit([(0..2, "")], None, cx);
5995 assert_eq!(buffer.is_empty(), true);
5996 assert_eq!(buffer.is_dirty(), false);
5997 });
5998 assert_eq!(
5999 *events.lock(),
6000 &[
6001 language::BufferEvent::Edited { is_local: true },
6002 language::BufferEvent::DirtyChanged
6003 ]
6004 );
6005
6006 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6007 let events = Arc::new(Mutex::new(Vec::new()));
6008 let buffer3 = project
6009 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6010 .await
6011 .unwrap();
6012 buffer3.update(cx, |_, cx| {
6013 cx.subscribe(&buffer3, {
6014 let events = events.clone();
6015 move |_, _, event, _| match event {
6016 BufferEvent::Operation { .. } => {}
6017 _ => events.lock().push(event.clone()),
6018 }
6019 })
6020 .detach();
6021 });
6022
6023 buffer3.update(cx, |buffer, cx| {
6024 buffer.edit([(0..0, "x")], None, cx);
6025 });
6026 events.lock().clear();
6027 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6028 .await
6029 .unwrap();
6030 cx.executor().run_until_parked();
6031 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6032 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6033}
6034
6035#[gpui::test]
6036async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
6037 init_test(cx);
6038
6039 let fs = FakeFs::new(cx.executor());
6040 fs.insert_tree(
6041 path!("/dir"),
6042 json!({
6043 "file.txt": "version 1",
6044 }),
6045 )
6046 .await;
6047
6048 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6049 let buffer = project
6050 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6051 .await
6052 .unwrap();
6053
6054 buffer.read_with(cx, |buffer, _| {
6055 assert_eq!(buffer.text(), "version 1");
6056 assert!(!buffer.is_dirty());
6057 });
6058
6059 // User makes an edit, making the buffer dirty.
6060 buffer.update(cx, |buffer, cx| {
6061 buffer.edit([(0..0, "user edit: ")], None, cx);
6062 });
6063
6064 buffer.read_with(cx, |buffer, _| {
6065 assert!(buffer.is_dirty());
6066 assert_eq!(buffer.text(), "user edit: version 1");
6067 });
6068
6069 // External tool writes new content while buffer is dirty.
6070 // file_updated() updates the File but suppresses ReloadNeeded.
6071 fs.save(
6072 path!("/dir/file.txt").as_ref(),
6073 &"version 2 from external tool".into(),
6074 Default::default(),
6075 )
6076 .await
6077 .unwrap();
6078 cx.executor().run_until_parked();
6079
6080 buffer.read_with(cx, |buffer, _| {
6081 assert!(buffer.has_conflict());
6082 assert_eq!(buffer.text(), "user edit: version 1");
6083 });
6084
6085 // User undoes their edit. Buffer becomes clean, but disk has different
6086 // content. did_edit() detects the dirty->clean transition and checks if
6087 // disk changed while dirty. Since mtime differs from saved_mtime, it
6088 // emits ReloadNeeded.
6089 buffer.update(cx, |buffer, cx| {
6090 buffer.undo(cx);
6091 });
6092 cx.executor().run_until_parked();
6093
6094 buffer.read_with(cx, |buffer, _| {
6095 assert_eq!(
6096 buffer.text(),
6097 "version 2 from external tool",
6098 "buffer should reload from disk after undo makes it clean"
6099 );
6100 assert!(!buffer.is_dirty());
6101 });
6102}
6103
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer reloads by
    // diffing old vs. new contents, keeping anchors attached to their
    // logical positions. When a dirty buffer's file changes, the buffer
    // keeps its contents and is marked as conflicted instead.
    init_test(cx);

    // The `ˇ` markers yield offsets at which we create anchors, so we can
    // verify where each anchor lands after the on-disk edit is applied.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Anchor at each marked offset before the file changes.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should now resolve to the corresponding marker
        // position in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6186
6187#[gpui::test]
6188async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6189 init_test(cx);
6190
6191 let fs = FakeFs::new(cx.executor());
6192 fs.insert_tree(
6193 path!("/dir"),
6194 json!({
6195 "file1": "a\nb\nc\n",
6196 "file2": "one\r\ntwo\r\nthree\r\n",
6197 }),
6198 )
6199 .await;
6200
6201 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6202 let buffer1 = project
6203 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6204 .await
6205 .unwrap();
6206 let buffer2 = project
6207 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6208 .await
6209 .unwrap();
6210
6211 buffer1.update(cx, |buffer, _| {
6212 assert_eq!(buffer.text(), "a\nb\nc\n");
6213 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6214 });
6215 buffer2.update(cx, |buffer, _| {
6216 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6217 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6218 });
6219
6220 // Change a file's line endings on disk from unix to windows. The buffer's
6221 // state updates correctly.
6222 fs.save(
6223 path!("/dir/file1").as_ref(),
6224 &"aaa\nb\nc\n".into(),
6225 LineEnding::Windows,
6226 )
6227 .await
6228 .unwrap();
6229 cx.executor().run_until_parked();
6230 buffer1.update(cx, |buffer, _| {
6231 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6232 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6233 });
6234
6235 // Save a file with windows line endings. The file is written correctly.
6236 buffer2.update(cx, |buffer, cx| {
6237 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6238 });
6239 project
6240 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6241 .await
6242 .unwrap();
6243 assert_eq!(
6244 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6245 "one\r\ntwo\r\nthree\r\nfour\r\n",
6246 );
6247}
6248
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics are grouped via their
    // `related_information`: each primary diagnostic plus its hint
    // diagnostics share a `group_id`, with `is_primary` set only on the
    // primary entry. Also checks `diagnostics_in_range` ordering and
    // `diagnostic_group` retrieval by group id.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics forming two groups:
    // - "error 1" (warning) + "error 1 hint 1" (hint)
    // - "error 2" (error) + "error 2 hint 1" + "error 2 hint 2" (hints)
    // Hints reference their primary (and vice versa) via related_information.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the store as if a server had published them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position; group 0 is "error 2"'s group and
    // group 1 is "error 1"'s.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6508
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-rename handshake: when an entry is renamed, the
    // project sends `workspace/willRenameFiles` to servers whose registered
    // file-operation filters match, applies the workspace edit the server
    // returns, and then sends the `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers: all `.rs` files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both willRename and didRename support for those filters.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename `one.rs` -> `three.rs`; this should trigger the
    // willRename request before the rename is applied.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRename with (an arbitrary edit in
    // the *other* file, to prove the response edit gets applied).
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRename handler runs, so we can assert it happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRename carries the old and new URIs of the file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, didRename is sent with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6645
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end test of the two-phase LSP rename flow: `prepare_rename`
    // resolves the renameable range under the cursor, then `perform_rename`
    // applies the server's multi-file `WorkspaceEdit` as a transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with `prepare_provider: true` so the project
    // issues both PrepareRename and Rename requests to this server.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Kick off prepare_rename at offset 7 (inside `ONE`) first; the fake
    // server's handler is installed afterwards and answers the pending
    // request.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server-provided LSP range must map back to byte offsets 6..9 — the
    // `ONE` identifier in "const ONE: usize = 1;".
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename; the fake server responds with a WorkspaceEdit
    // spanning both files of the project.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction must touch exactly the two edited buffers,
    // with both occurrences in two.rs rewritten.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6786
6787#[gpui::test]
6788async fn test_search(cx: &mut gpui::TestAppContext) {
6789 init_test(cx);
6790
6791 let fs = FakeFs::new(cx.executor());
6792 fs.insert_tree(
6793 path!("/dir"),
6794 json!({
6795 "one.rs": "const ONE: usize = 1;",
6796 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6797 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6798 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6799 }),
6800 )
6801 .await;
6802 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6803 assert_eq!(
6804 search(
6805 &project,
6806 SearchQuery::text(
6807 "TWO",
6808 false,
6809 true,
6810 false,
6811 Default::default(),
6812 Default::default(),
6813 false,
6814 None
6815 )
6816 .unwrap(),
6817 cx
6818 )
6819 .await
6820 .unwrap(),
6821 HashMap::from_iter([
6822 (path!("dir/two.rs").to_string(), vec![6..9]),
6823 (path!("dir/three.rs").to_string(), vec![37..40])
6824 ])
6825 );
6826
6827 let buffer_4 = project
6828 .update(cx, |project, cx| {
6829 project.open_local_buffer(path!("/dir/four.rs"), cx)
6830 })
6831 .await
6832 .unwrap();
6833 buffer_4.update(cx, |buffer, cx| {
6834 let text = "two::TWO";
6835 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6836 });
6837
6838 assert_eq!(
6839 search(
6840 &project,
6841 SearchQuery::text(
6842 "TWO",
6843 false,
6844 true,
6845 false,
6846 Default::default(),
6847 Default::default(),
6848 false,
6849 None,
6850 )
6851 .unwrap(),
6852 cx
6853 )
6854 .await
6855 .unwrap(),
6856 HashMap::from_iter([
6857 (path!("dir/two.rs").to_string(), vec![6..9]),
6858 (path!("dir/three.rs").to_string(), vec![37..40]),
6859 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6860 ])
6861 );
6862}
6863
6864#[gpui::test]
6865async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6866 init_test(cx);
6867
6868 let search_query = "file";
6869
6870 let fs = FakeFs::new(cx.executor());
6871 fs.insert_tree(
6872 path!("/dir"),
6873 json!({
6874 "one.rs": r#"// Rust file one"#,
6875 "one.ts": r#"// TypeScript file one"#,
6876 "two.rs": r#"// Rust file two"#,
6877 "two.ts": r#"// TypeScript file two"#,
6878 }),
6879 )
6880 .await;
6881 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6882
6883 assert!(
6884 search(
6885 &project,
6886 SearchQuery::text(
6887 search_query,
6888 false,
6889 true,
6890 false,
6891 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6892 Default::default(),
6893 false,
6894 None
6895 )
6896 .unwrap(),
6897 cx
6898 )
6899 .await
6900 .unwrap()
6901 .is_empty(),
6902 "If no inclusions match, no files should be returned"
6903 );
6904
6905 assert_eq!(
6906 search(
6907 &project,
6908 SearchQuery::text(
6909 search_query,
6910 false,
6911 true,
6912 false,
6913 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6914 Default::default(),
6915 false,
6916 None
6917 )
6918 .unwrap(),
6919 cx
6920 )
6921 .await
6922 .unwrap(),
6923 HashMap::from_iter([
6924 (path!("dir/one.rs").to_string(), vec![8..12]),
6925 (path!("dir/two.rs").to_string(), vec![8..12]),
6926 ]),
6927 "Rust only search should give only Rust files"
6928 );
6929
6930 assert_eq!(
6931 search(
6932 &project,
6933 SearchQuery::text(
6934 search_query,
6935 false,
6936 true,
6937 false,
6938 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6939 .unwrap(),
6940 Default::default(),
6941 false,
6942 None,
6943 )
6944 .unwrap(),
6945 cx
6946 )
6947 .await
6948 .unwrap(),
6949 HashMap::from_iter([
6950 (path!("dir/one.ts").to_string(), vec![14..18]),
6951 (path!("dir/two.ts").to_string(), vec![14..18]),
6952 ]),
6953 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6954 );
6955
6956 assert_eq!(
6957 search(
6958 &project,
6959 SearchQuery::text(
6960 search_query,
6961 false,
6962 true,
6963 false,
6964 PathMatcher::new(
6965 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6966 PathStyle::local()
6967 )
6968 .unwrap(),
6969 Default::default(),
6970 false,
6971 None,
6972 )
6973 .unwrap(),
6974 cx
6975 )
6976 .await
6977 .unwrap(),
6978 HashMap::from_iter([
6979 (path!("dir/two.ts").to_string(), vec![14..18]),
6980 (path!("dir/one.rs").to_string(), vec![8..12]),
6981 (path!("dir/one.ts").to_string(), vec![14..18]),
6982 (path!("dir/two.rs").to_string(), vec![8..12]),
6983 ]),
6984 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6985 );
6986}
6987
6988#[gpui::test]
6989async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6990 init_test(cx);
6991
6992 let search_query = "file";
6993
6994 let fs = FakeFs::new(cx.executor());
6995 fs.insert_tree(
6996 path!("/dir"),
6997 json!({
6998 "one.rs": r#"// Rust file one"#,
6999 "one.ts": r#"// TypeScript file one"#,
7000 "two.rs": r#"// Rust file two"#,
7001 "two.ts": r#"// TypeScript file two"#,
7002 }),
7003 )
7004 .await;
7005 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7006
7007 assert_eq!(
7008 search(
7009 &project,
7010 SearchQuery::text(
7011 search_query,
7012 false,
7013 true,
7014 false,
7015 Default::default(),
7016 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7017 false,
7018 None,
7019 )
7020 .unwrap(),
7021 cx
7022 )
7023 .await
7024 .unwrap(),
7025 HashMap::from_iter([
7026 (path!("dir/one.rs").to_string(), vec![8..12]),
7027 (path!("dir/one.ts").to_string(), vec![14..18]),
7028 (path!("dir/two.rs").to_string(), vec![8..12]),
7029 (path!("dir/two.ts").to_string(), vec![14..18]),
7030 ]),
7031 "If no exclusions match, all files should be returned"
7032 );
7033
7034 assert_eq!(
7035 search(
7036 &project,
7037 SearchQuery::text(
7038 search_query,
7039 false,
7040 true,
7041 false,
7042 Default::default(),
7043 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7044 false,
7045 None,
7046 )
7047 .unwrap(),
7048 cx
7049 )
7050 .await
7051 .unwrap(),
7052 HashMap::from_iter([
7053 (path!("dir/one.ts").to_string(), vec![14..18]),
7054 (path!("dir/two.ts").to_string(), vec![14..18]),
7055 ]),
7056 "Rust exclusion search should give only TypeScript files"
7057 );
7058
7059 assert_eq!(
7060 search(
7061 &project,
7062 SearchQuery::text(
7063 search_query,
7064 false,
7065 true,
7066 false,
7067 Default::default(),
7068 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7069 .unwrap(),
7070 false,
7071 None,
7072 )
7073 .unwrap(),
7074 cx
7075 )
7076 .await
7077 .unwrap(),
7078 HashMap::from_iter([
7079 (path!("dir/one.rs").to_string(), vec![8..12]),
7080 (path!("dir/two.rs").to_string(), vec![8..12]),
7081 ]),
7082 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7083 );
7084
7085 assert!(
7086 search(
7087 &project,
7088 SearchQuery::text(
7089 search_query,
7090 false,
7091 true,
7092 false,
7093 Default::default(),
7094 PathMatcher::new(
7095 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7096 PathStyle::local(),
7097 )
7098 .unwrap(),
7099 false,
7100 None,
7101 )
7102 .unwrap(),
7103 cx
7104 )
7105 .await
7106 .unwrap()
7107 .is_empty(),
7108 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7109 );
7110}
7111
7112#[gpui::test]
7113async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7114 init_test(cx);
7115
7116 let search_query = "file";
7117
7118 let fs = FakeFs::new(cx.executor());
7119 fs.insert_tree(
7120 path!("/dir"),
7121 json!({
7122 "one.rs": r#"// Rust file one"#,
7123 "one.ts": r#"// TypeScript file one"#,
7124 "two.rs": r#"// Rust file two"#,
7125 "two.ts": r#"// TypeScript file two"#,
7126 }),
7127 )
7128 .await;
7129
7130 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7131 let path_style = PathStyle::local();
7132 let _buffer = project.update(cx, |project, cx| {
7133 project.create_local_buffer("file", None, false, cx)
7134 });
7135
7136 assert_eq!(
7137 search(
7138 &project,
7139 SearchQuery::text(
7140 search_query,
7141 false,
7142 true,
7143 false,
7144 Default::default(),
7145 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7146 false,
7147 None,
7148 )
7149 .unwrap(),
7150 cx
7151 )
7152 .await
7153 .unwrap(),
7154 HashMap::from_iter([
7155 (path!("dir/one.rs").to_string(), vec![8..12]),
7156 (path!("dir/one.ts").to_string(), vec![14..18]),
7157 (path!("dir/two.rs").to_string(), vec![8..12]),
7158 (path!("dir/two.ts").to_string(), vec![14..18]),
7159 ]),
7160 "If no exclusions match, all files should be returned"
7161 );
7162
7163 assert_eq!(
7164 search(
7165 &project,
7166 SearchQuery::text(
7167 search_query,
7168 false,
7169 true,
7170 false,
7171 Default::default(),
7172 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7173 false,
7174 None,
7175 )
7176 .unwrap(),
7177 cx
7178 )
7179 .await
7180 .unwrap(),
7181 HashMap::from_iter([
7182 (path!("dir/one.ts").to_string(), vec![14..18]),
7183 (path!("dir/two.ts").to_string(), vec![14..18]),
7184 ]),
7185 "Rust exclusion search should give only TypeScript files"
7186 );
7187
7188 assert_eq!(
7189 search(
7190 &project,
7191 SearchQuery::text(
7192 search_query,
7193 false,
7194 true,
7195 false,
7196 Default::default(),
7197 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7198 false,
7199 None,
7200 )
7201 .unwrap(),
7202 cx
7203 )
7204 .await
7205 .unwrap(),
7206 HashMap::from_iter([
7207 (path!("dir/one.rs").to_string(), vec![8..12]),
7208 (path!("dir/two.rs").to_string(), vec![8..12]),
7209 ]),
7210 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7211 );
7212
7213 assert!(
7214 search(
7215 &project,
7216 SearchQuery::text(
7217 search_query,
7218 false,
7219 true,
7220 false,
7221 Default::default(),
7222 PathMatcher::new(
7223 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7224 PathStyle::local(),
7225 )
7226 .unwrap(),
7227 false,
7228 None,
7229 )
7230 .unwrap(),
7231 cx
7232 )
7233 .await
7234 .unwrap()
7235 .is_empty(),
7236 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7237 );
7238}
7239
7240#[gpui::test]
7241async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7242 init_test(cx);
7243
7244 let search_query = "file";
7245
7246 let fs = FakeFs::new(cx.executor());
7247 fs.insert_tree(
7248 path!("/dir"),
7249 json!({
7250 "one.rs": r#"// Rust file one"#,
7251 "one.ts": r#"// TypeScript file one"#,
7252 "two.rs": r#"// Rust file two"#,
7253 "two.ts": r#"// TypeScript file two"#,
7254 }),
7255 )
7256 .await;
7257 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7258 assert!(
7259 search(
7260 &project,
7261 SearchQuery::text(
7262 search_query,
7263 false,
7264 true,
7265 false,
7266 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7267 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7268 false,
7269 None,
7270 )
7271 .unwrap(),
7272 cx
7273 )
7274 .await
7275 .unwrap()
7276 .is_empty(),
7277 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7278 );
7279
7280 assert!(
7281 search(
7282 &project,
7283 SearchQuery::text(
7284 search_query,
7285 false,
7286 true,
7287 false,
7288 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7289 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7290 false,
7291 None,
7292 )
7293 .unwrap(),
7294 cx
7295 )
7296 .await
7297 .unwrap()
7298 .is_empty(),
7299 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7300 );
7301
7302 assert!(
7303 search(
7304 &project,
7305 SearchQuery::text(
7306 search_query,
7307 false,
7308 true,
7309 false,
7310 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7311 .unwrap(),
7312 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7313 .unwrap(),
7314 false,
7315 None,
7316 )
7317 .unwrap(),
7318 cx
7319 )
7320 .await
7321 .unwrap()
7322 .is_empty(),
7323 "Non-matching inclusions and exclusions should not change that."
7324 );
7325
7326 assert_eq!(
7327 search(
7328 &project,
7329 SearchQuery::text(
7330 search_query,
7331 false,
7332 true,
7333 false,
7334 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7335 .unwrap(),
7336 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7337 .unwrap(),
7338 false,
7339 None,
7340 )
7341 .unwrap(),
7342 cx
7343 )
7344 .await
7345 .unwrap(),
7346 HashMap::from_iter([
7347 (path!("dir/one.ts").to_string(), vec![14..18]),
7348 (path!("dir/two.ts").to_string(), vec![14..18]),
7349 ]),
7350 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7351 );
7352}
7353
7354#[gpui::test]
7355async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7356 init_test(cx);
7357
7358 let fs = FakeFs::new(cx.executor());
7359 fs.insert_tree(
7360 path!("/worktree-a"),
7361 json!({
7362 "haystack.rs": r#"// NEEDLE"#,
7363 "haystack.ts": r#"// NEEDLE"#,
7364 }),
7365 )
7366 .await;
7367 fs.insert_tree(
7368 path!("/worktree-b"),
7369 json!({
7370 "haystack.rs": r#"// NEEDLE"#,
7371 "haystack.ts": r#"// NEEDLE"#,
7372 }),
7373 )
7374 .await;
7375
7376 let path_style = PathStyle::local();
7377 let project = Project::test(
7378 fs.clone(),
7379 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7380 cx,
7381 )
7382 .await;
7383
7384 assert_eq!(
7385 search(
7386 &project,
7387 SearchQuery::text(
7388 "NEEDLE",
7389 false,
7390 true,
7391 false,
7392 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7393 Default::default(),
7394 true,
7395 None,
7396 )
7397 .unwrap(),
7398 cx
7399 )
7400 .await
7401 .unwrap(),
7402 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7403 "should only return results from included worktree"
7404 );
7405 assert_eq!(
7406 search(
7407 &project,
7408 SearchQuery::text(
7409 "NEEDLE",
7410 false,
7411 true,
7412 false,
7413 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7414 Default::default(),
7415 true,
7416 None,
7417 )
7418 .unwrap(),
7419 cx
7420 )
7421 .await
7422 .unwrap(),
7423 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7424 "should only return results from included worktree"
7425 );
7426
7427 assert_eq!(
7428 search(
7429 &project,
7430 SearchQuery::text(
7431 "NEEDLE",
7432 false,
7433 true,
7434 false,
7435 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7436 Default::default(),
7437 false,
7438 None,
7439 )
7440 .unwrap(),
7441 cx
7442 )
7443 .await
7444 .unwrap(),
7445 HashMap::from_iter([
7446 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7447 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7448 ]),
7449 "should return results from both worktrees"
7450 );
7451}
7452
7453#[gpui::test]
7454async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7455 init_test(cx);
7456
7457 let fs = FakeFs::new(cx.background_executor.clone());
7458 fs.insert_tree(
7459 path!("/dir"),
7460 json!({
7461 ".git": {},
7462 ".gitignore": "**/target\n/node_modules\n",
7463 "target": {
7464 "index.txt": "index_key:index_value"
7465 },
7466 "node_modules": {
7467 "eslint": {
7468 "index.ts": "const eslint_key = 'eslint value'",
7469 "package.json": r#"{ "some_key": "some value" }"#,
7470 },
7471 "prettier": {
7472 "index.ts": "const prettier_key = 'prettier value'",
7473 "package.json": r#"{ "other_key": "other value" }"#,
7474 },
7475 },
7476 "package.json": r#"{ "main_key": "main value" }"#,
7477 }),
7478 )
7479 .await;
7480 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7481
7482 let query = "key";
7483 assert_eq!(
7484 search(
7485 &project,
7486 SearchQuery::text(
7487 query,
7488 false,
7489 false,
7490 false,
7491 Default::default(),
7492 Default::default(),
7493 false,
7494 None,
7495 )
7496 .unwrap(),
7497 cx
7498 )
7499 .await
7500 .unwrap(),
7501 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7502 "Only one non-ignored file should have the query"
7503 );
7504
7505 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7506 let path_style = PathStyle::local();
7507 assert_eq!(
7508 search(
7509 &project,
7510 SearchQuery::text(
7511 query,
7512 false,
7513 false,
7514 true,
7515 Default::default(),
7516 Default::default(),
7517 false,
7518 None,
7519 )
7520 .unwrap(),
7521 cx
7522 )
7523 .await
7524 .unwrap(),
7525 HashMap::from_iter([
7526 (path!("dir/package.json").to_string(), vec![8..11]),
7527 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7528 (
7529 path!("dir/node_modules/prettier/package.json").to_string(),
7530 vec![9..12]
7531 ),
7532 (
7533 path!("dir/node_modules/prettier/index.ts").to_string(),
7534 vec![15..18]
7535 ),
7536 (
7537 path!("dir/node_modules/eslint/index.ts").to_string(),
7538 vec![13..16]
7539 ),
7540 (
7541 path!("dir/node_modules/eslint/package.json").to_string(),
7542 vec![8..11]
7543 ),
7544 ]),
7545 "Unrestricted search with ignored directories should find every file with the query"
7546 );
7547
7548 let files_to_include =
7549 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7550 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7551 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7552 assert_eq!(
7553 search(
7554 &project,
7555 SearchQuery::text(
7556 query,
7557 false,
7558 false,
7559 true,
7560 files_to_include,
7561 files_to_exclude,
7562 false,
7563 None,
7564 )
7565 .unwrap(),
7566 cx
7567 )
7568 .await
7569 .unwrap(),
7570 HashMap::from_iter([(
7571 path!("dir/node_modules/prettier/package.json").to_string(),
7572 vec![9..12]
7573 )]),
7574 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7575 );
7576}
7577
7578#[gpui::test]
7579async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
7580 init_test(cx);
7581
7582 let fs = FakeFs::new(cx.executor());
7583 fs.insert_tree(
7584 path!("/dir"),
7585 json!({
7586 "one.rs": "// ПРИВЕТ? привет!",
7587 "two.rs": "// ПРИВЕТ.",
7588 "three.rs": "// привет",
7589 }),
7590 )
7591 .await;
7592 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7593 let unicode_case_sensitive_query = SearchQuery::text(
7594 "привет",
7595 false,
7596 true,
7597 false,
7598 Default::default(),
7599 Default::default(),
7600 false,
7601 None,
7602 );
7603 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
7604 assert_eq!(
7605 search(&project, unicode_case_sensitive_query.unwrap(), cx)
7606 .await
7607 .unwrap(),
7608 HashMap::from_iter([
7609 (path!("dir/one.rs").to_string(), vec![17..29]),
7610 (path!("dir/three.rs").to_string(), vec![3..15]),
7611 ])
7612 );
7613
7614 let unicode_case_insensitive_query = SearchQuery::text(
7615 "привет",
7616 false,
7617 false,
7618 false,
7619 Default::default(),
7620 Default::default(),
7621 false,
7622 None,
7623 );
7624 assert_matches!(
7625 unicode_case_insensitive_query,
7626 Ok(SearchQuery::Regex { .. })
7627 );
7628 assert_eq!(
7629 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7630 .await
7631 .unwrap(),
7632 HashMap::from_iter([
7633 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7634 (path!("dir/two.rs").to_string(), vec![3..15]),
7635 (path!("dir/three.rs").to_string(), vec![3..15]),
7636 ])
7637 );
7638
7639 assert_eq!(
7640 search(
7641 &project,
7642 SearchQuery::text(
7643 "привет.",
7644 false,
7645 false,
7646 false,
7647 Default::default(),
7648 Default::default(),
7649 false,
7650 None,
7651 )
7652 .unwrap(),
7653 cx
7654 )
7655 .await
7656 .unwrap(),
7657 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7658 );
7659}
7660
7661#[gpui::test]
7662async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7663 init_test(cx);
7664
7665 let fs = FakeFs::new(cx.executor());
7666 fs.insert_tree(
7667 "/one/two",
7668 json!({
7669 "three": {
7670 "a.txt": "",
7671 "four": {}
7672 },
7673 "c.rs": ""
7674 }),
7675 )
7676 .await;
7677
7678 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7679 project
7680 .update(cx, |project, cx| {
7681 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7682 project.create_entry((id, rel_path("b..")), true, cx)
7683 })
7684 .await
7685 .unwrap()
7686 .into_included()
7687 .unwrap();
7688
7689 assert_eq!(
7690 fs.paths(true),
7691 vec![
7692 PathBuf::from(path!("/")),
7693 PathBuf::from(path!("/one")),
7694 PathBuf::from(path!("/one/two")),
7695 PathBuf::from(path!("/one/two/c.rs")),
7696 PathBuf::from(path!("/one/two/three")),
7697 PathBuf::from(path!("/one/two/three/a.txt")),
7698 PathBuf::from(path!("/one/two/three/b..")),
7699 PathBuf::from(path!("/one/two/three/four")),
7700 ]
7701 );
7702}
7703
7704#[gpui::test]
7705async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7706 init_test(cx);
7707
7708 let fs = FakeFs::new(cx.executor());
7709 fs.insert_tree(
7710 path!("/dir"),
7711 json!({
7712 "a.tsx": "a",
7713 }),
7714 )
7715 .await;
7716
7717 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7718
7719 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7720 language_registry.add(tsx_lang());
7721 let language_server_names = [
7722 "TypeScriptServer",
7723 "TailwindServer",
7724 "ESLintServer",
7725 "NoHoverCapabilitiesServer",
7726 ];
7727 let mut language_servers = [
7728 language_registry.register_fake_lsp(
7729 "tsx",
7730 FakeLspAdapter {
7731 name: language_server_names[0],
7732 capabilities: lsp::ServerCapabilities {
7733 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7734 ..lsp::ServerCapabilities::default()
7735 },
7736 ..FakeLspAdapter::default()
7737 },
7738 ),
7739 language_registry.register_fake_lsp(
7740 "tsx",
7741 FakeLspAdapter {
7742 name: language_server_names[1],
7743 capabilities: lsp::ServerCapabilities {
7744 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7745 ..lsp::ServerCapabilities::default()
7746 },
7747 ..FakeLspAdapter::default()
7748 },
7749 ),
7750 language_registry.register_fake_lsp(
7751 "tsx",
7752 FakeLspAdapter {
7753 name: language_server_names[2],
7754 capabilities: lsp::ServerCapabilities {
7755 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7756 ..lsp::ServerCapabilities::default()
7757 },
7758 ..FakeLspAdapter::default()
7759 },
7760 ),
7761 language_registry.register_fake_lsp(
7762 "tsx",
7763 FakeLspAdapter {
7764 name: language_server_names[3],
7765 capabilities: lsp::ServerCapabilities {
7766 hover_provider: None,
7767 ..lsp::ServerCapabilities::default()
7768 },
7769 ..FakeLspAdapter::default()
7770 },
7771 ),
7772 ];
7773
7774 let (buffer, _handle) = project
7775 .update(cx, |p, cx| {
7776 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7777 })
7778 .await
7779 .unwrap();
7780 cx.executor().run_until_parked();
7781
7782 let mut servers_with_hover_requests = HashMap::default();
7783 for i in 0..language_server_names.len() {
7784 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7785 panic!(
7786 "Failed to get language server #{i} with name {}",
7787 &language_server_names[i]
7788 )
7789 });
7790 let new_server_name = new_server.server.name();
7791 assert!(
7792 !servers_with_hover_requests.contains_key(&new_server_name),
7793 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7794 );
7795 match new_server_name.as_ref() {
7796 "TailwindServer" | "TypeScriptServer" => {
7797 servers_with_hover_requests.insert(
7798 new_server_name.clone(),
7799 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7800 move |_, _| {
7801 let name = new_server_name.clone();
7802 async move {
7803 Ok(Some(lsp::Hover {
7804 contents: lsp::HoverContents::Scalar(
7805 lsp::MarkedString::String(format!("{name} hover")),
7806 ),
7807 range: None,
7808 }))
7809 }
7810 },
7811 ),
7812 );
7813 }
7814 "ESLintServer" => {
7815 servers_with_hover_requests.insert(
7816 new_server_name,
7817 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7818 |_, _| async move { Ok(None) },
7819 ),
7820 );
7821 }
7822 "NoHoverCapabilitiesServer" => {
7823 let _never_handled = new_server
7824 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7825 panic!(
7826 "Should not call for hovers server with no corresponding capabilities"
7827 )
7828 });
7829 }
7830 unexpected => panic!("Unexpected server name: {unexpected}"),
7831 }
7832 }
7833
7834 let hover_task = project.update(cx, |project, cx| {
7835 project.hover(&buffer, Point::new(0, 0), cx)
7836 });
7837 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7838 |mut hover_request| async move {
7839 hover_request
7840 .next()
7841 .await
7842 .expect("All hover requests should have been triggered")
7843 },
7844 ))
7845 .await;
7846 assert_eq!(
7847 vec!["TailwindServer hover", "TypeScriptServer hover"],
7848 hover_task
7849 .await
7850 .into_iter()
7851 .flatten()
7852 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7853 .sorted()
7854 .collect::<Vec<_>>(),
7855 "Should receive hover responses from all related servers with hover capabilities"
7856 );
7857}
7858
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover whose content blocks are all empty or whitespace-only must be
    // filtered out entirely rather than shown as blank hover text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three content blocks that are all blank in
    // different ways: empty, spaces only, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Issue the hover, make sure the server actually received the request,
    // then verify nothing survives the empty-part filtering.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7932
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with an explicit kinds filter: even though the
    // server offers two actions of different kinds, only the requested kind
    // is returned to the caller.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always answers with two actions of different kinds,
    // regardless of any filter in the request.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask for SOURCE_ORGANIZE_IMPORTS actions only, over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action of the requested kind survives.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8011
8012#[gpui::test]
8013async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8014 cx: &mut gpui::TestAppContext,
8015) {
8016 init_test(cx);
8017
8018 let fs = FakeFs::new(cx.executor());
8019 fs.insert_tree(
8020 path!("/dir"),
8021 json!({
8022 "a.ts": "a",
8023 }),
8024 )
8025 .await;
8026
8027 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8028
8029 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8030 language_registry.add(typescript_lang());
8031 let mut fake_language_servers = language_registry.register_fake_lsp(
8032 "TypeScript",
8033 FakeLspAdapter {
8034 capabilities: lsp::ServerCapabilities {
8035 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8036 lsp::CodeActionOptions {
8037 code_action_kinds: Some(vec![
8038 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8039 "source.doc".into(),
8040 ]),
8041 ..lsp::CodeActionOptions::default()
8042 },
8043 )),
8044 ..lsp::ServerCapabilities::default()
8045 },
8046 ..FakeLspAdapter::default()
8047 },
8048 );
8049
8050 let (buffer, _handle) = project
8051 .update(cx, |p, cx| {
8052 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8053 })
8054 .await
8055 .unwrap();
8056 cx.executor().run_until_parked();
8057
8058 let fake_server = fake_language_servers
8059 .next()
8060 .await
8061 .expect("failed to get the language server");
8062
8063 let mut request_handled = fake_server.set_request_handler::<
8064 lsp::request::CodeActionRequest,
8065 _,
8066 _,
8067 >(move |params, _| async move {
8068 assert_eq!(
8069 params.context.only, None,
8070 "Code action requests without explicit kind filters should not send `context.only`"
8071 );
8072 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8073 lsp::CodeAction {
8074 title: "Add test".to_string(),
8075 kind: Some("source.addTest".into()),
8076 ..lsp::CodeAction::default()
8077 },
8078 )]))
8079 });
8080
8081 let code_actions_task = project.update(cx, |project, cx| {
8082 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8083 });
8084
8085 let () = request_handled
8086 .next()
8087 .await
8088 .expect("The code action request should have been triggered");
8089
8090 let code_actions = code_actions_task.await.unwrap().unwrap();
8091 assert_eq!(code_actions.len(), 1);
8092 assert_eq!(
8093 code_actions[0].lsp_action.action_kind(),
8094 Some("source.addTest".into())
8095 );
8096}
8097
8098#[gpui::test]
8099async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8100 init_test(cx);
8101
8102 let fs = FakeFs::new(cx.executor());
8103 fs.insert_tree(
8104 path!("/dir"),
8105 json!({
8106 "a.tsx": "a",
8107 }),
8108 )
8109 .await;
8110
8111 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8112
8113 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8114 language_registry.add(tsx_lang());
8115 let language_server_names = [
8116 "TypeScriptServer",
8117 "TailwindServer",
8118 "ESLintServer",
8119 "NoActionsCapabilitiesServer",
8120 ];
8121
8122 let mut language_server_rxs = [
8123 language_registry.register_fake_lsp(
8124 "tsx",
8125 FakeLspAdapter {
8126 name: language_server_names[0],
8127 capabilities: lsp::ServerCapabilities {
8128 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8129 ..lsp::ServerCapabilities::default()
8130 },
8131 ..FakeLspAdapter::default()
8132 },
8133 ),
8134 language_registry.register_fake_lsp(
8135 "tsx",
8136 FakeLspAdapter {
8137 name: language_server_names[1],
8138 capabilities: lsp::ServerCapabilities {
8139 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8140 ..lsp::ServerCapabilities::default()
8141 },
8142 ..FakeLspAdapter::default()
8143 },
8144 ),
8145 language_registry.register_fake_lsp(
8146 "tsx",
8147 FakeLspAdapter {
8148 name: language_server_names[2],
8149 capabilities: lsp::ServerCapabilities {
8150 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8151 ..lsp::ServerCapabilities::default()
8152 },
8153 ..FakeLspAdapter::default()
8154 },
8155 ),
8156 language_registry.register_fake_lsp(
8157 "tsx",
8158 FakeLspAdapter {
8159 name: language_server_names[3],
8160 capabilities: lsp::ServerCapabilities {
8161 code_action_provider: None,
8162 ..lsp::ServerCapabilities::default()
8163 },
8164 ..FakeLspAdapter::default()
8165 },
8166 ),
8167 ];
8168
8169 let (buffer, _handle) = project
8170 .update(cx, |p, cx| {
8171 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8172 })
8173 .await
8174 .unwrap();
8175 cx.executor().run_until_parked();
8176
8177 let mut servers_with_actions_requests = HashMap::default();
8178 for i in 0..language_server_names.len() {
8179 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8180 panic!(
8181 "Failed to get language server #{i} with name {}",
8182 &language_server_names[i]
8183 )
8184 });
8185 let new_server_name = new_server.server.name();
8186
8187 assert!(
8188 !servers_with_actions_requests.contains_key(&new_server_name),
8189 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8190 );
8191 match new_server_name.0.as_ref() {
8192 "TailwindServer" | "TypeScriptServer" => {
8193 servers_with_actions_requests.insert(
8194 new_server_name.clone(),
8195 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8196 move |_, _| {
8197 let name = new_server_name.clone();
8198 async move {
8199 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8200 lsp::CodeAction {
8201 title: format!("{name} code action"),
8202 ..lsp::CodeAction::default()
8203 },
8204 )]))
8205 }
8206 },
8207 ),
8208 );
8209 }
8210 "ESLintServer" => {
8211 servers_with_actions_requests.insert(
8212 new_server_name,
8213 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8214 |_, _| async move { Ok(None) },
8215 ),
8216 );
8217 }
8218 "NoActionsCapabilitiesServer" => {
8219 let _never_handled = new_server
8220 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8221 panic!(
8222 "Should not call for code actions server with no corresponding capabilities"
8223 )
8224 });
8225 }
8226 unexpected => panic!("Unexpected server name: {unexpected}"),
8227 }
8228 }
8229
8230 let code_actions_task = project.update(cx, |project, cx| {
8231 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8232 });
8233
8234 // cx.run_until_parked();
8235 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8236 |mut code_actions_request| async move {
8237 code_actions_request
8238 .next()
8239 .await
8240 .expect("All code actions requests should have been triggered")
8241 },
8242 ))
8243 .await;
8244 assert_eq!(
8245 vec!["TailwindServer code action", "TypeScriptServer code action"],
8246 code_actions_task
8247 .await
8248 .unwrap()
8249 .unwrap()
8250 .into_iter()
8251 .map(|code_action| code_action.lsp_action.title().to_owned())
8252 .sorted()
8253 .collect::<Vec<_>>(),
8254 "Should receive code actions responses from all related servers with hover capabilities"
8255 );
8256}
8257
8258#[gpui::test]
8259async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8260 init_test(cx);
8261
8262 let fs = FakeFs::new(cx.executor());
8263 fs.insert_tree(
8264 "/dir",
8265 json!({
8266 "a.rs": "let a = 1;",
8267 "b.rs": "let b = 2;",
8268 "c.rs": "let c = 2;",
8269 }),
8270 )
8271 .await;
8272
8273 let project = Project::test(
8274 fs,
8275 [
8276 "/dir/a.rs".as_ref(),
8277 "/dir/b.rs".as_ref(),
8278 "/dir/c.rs".as_ref(),
8279 ],
8280 cx,
8281 )
8282 .await;
8283
8284 // check the initial state and get the worktrees
8285 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8286 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8287 assert_eq!(worktrees.len(), 3);
8288
8289 let worktree_a = worktrees[0].read(cx);
8290 let worktree_b = worktrees[1].read(cx);
8291 let worktree_c = worktrees[2].read(cx);
8292
8293 // check they start in the right order
8294 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8295 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8296 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8297
8298 (
8299 worktrees[0].clone(),
8300 worktrees[1].clone(),
8301 worktrees[2].clone(),
8302 )
8303 });
8304
8305 // move first worktree to after the second
8306 // [a, b, c] -> [b, a, c]
8307 project
8308 .update(cx, |project, cx| {
8309 let first = worktree_a.read(cx);
8310 let second = worktree_b.read(cx);
8311 project.move_worktree(first.id(), second.id(), cx)
8312 })
8313 .expect("moving first after second");
8314
8315 // check the state after moving
8316 project.update(cx, |project, cx| {
8317 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8318 assert_eq!(worktrees.len(), 3);
8319
8320 let first = worktrees[0].read(cx);
8321 let second = worktrees[1].read(cx);
8322 let third = worktrees[2].read(cx);
8323
8324 // check they are now in the right order
8325 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8326 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8327 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8328 });
8329
8330 // move the second worktree to before the first
8331 // [b, a, c] -> [a, b, c]
8332 project
8333 .update(cx, |project, cx| {
8334 let second = worktree_a.read(cx);
8335 let first = worktree_b.read(cx);
8336 project.move_worktree(first.id(), second.id(), cx)
8337 })
8338 .expect("moving second before first");
8339
8340 // check the state after moving
8341 project.update(cx, |project, cx| {
8342 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8343 assert_eq!(worktrees.len(), 3);
8344
8345 let first = worktrees[0].read(cx);
8346 let second = worktrees[1].read(cx);
8347 let third = worktrees[2].read(cx);
8348
8349 // check they are now in the right order
8350 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8351 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8352 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8353 });
8354
8355 // move the second worktree to after the third
8356 // [a, b, c] -> [a, c, b]
8357 project
8358 .update(cx, |project, cx| {
8359 let second = worktree_b.read(cx);
8360 let third = worktree_c.read(cx);
8361 project.move_worktree(second.id(), third.id(), cx)
8362 })
8363 .expect("moving second after third");
8364
8365 // check the state after moving
8366 project.update(cx, |project, cx| {
8367 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8368 assert_eq!(worktrees.len(), 3);
8369
8370 let first = worktrees[0].read(cx);
8371 let second = worktrees[1].read(cx);
8372 let third = worktrees[2].read(cx);
8373
8374 // check they are now in the right order
8375 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8376 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8377 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8378 });
8379
8380 // move the third worktree to before the second
8381 // [a, c, b] -> [a, b, c]
8382 project
8383 .update(cx, |project, cx| {
8384 let third = worktree_c.read(cx);
8385 let second = worktree_b.read(cx);
8386 project.move_worktree(third.id(), second.id(), cx)
8387 })
8388 .expect("moving third before second");
8389
8390 // check the state after moving
8391 project.update(cx, |project, cx| {
8392 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8393 assert_eq!(worktrees.len(), 3);
8394
8395 let first = worktrees[0].read(cx);
8396 let second = worktrees[1].read(cx);
8397 let third = worktrees[2].read(cx);
8398
8399 // check they are now in the right order
8400 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8401 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8402 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8403 });
8404
8405 // move the first worktree to after the third
8406 // [a, b, c] -> [b, c, a]
8407 project
8408 .update(cx, |project, cx| {
8409 let first = worktree_a.read(cx);
8410 let third = worktree_c.read(cx);
8411 project.move_worktree(first.id(), third.id(), cx)
8412 })
8413 .expect("moving first after third");
8414
8415 // check the state after moving
8416 project.update(cx, |project, cx| {
8417 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8418 assert_eq!(worktrees.len(), 3);
8419
8420 let first = worktrees[0].read(cx);
8421 let second = worktrees[1].read(cx);
8422 let third = worktrees[2].read(cx);
8423
8424 // check they are now in the right order
8425 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8426 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8427 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8428 });
8429
8430 // move the third worktree to before the first
8431 // [b, c, a] -> [a, b, c]
8432 project
8433 .update(cx, |project, cx| {
8434 let third = worktree_a.read(cx);
8435 let first = worktree_b.read(cx);
8436 project.move_worktree(third.id(), first.id(), cx)
8437 })
8438 .expect("moving third before first");
8439
8440 // check the state after moving
8441 project.update(cx, |project, cx| {
8442 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8443 assert_eq!(worktrees.len(), 3);
8444
8445 let first = worktrees[0].read(cx);
8446 let second = worktrees[1].read(cx);
8447 let third = worktrees[2].read(cx);
8448
8449 // check they are now in the right order
8450 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8451 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8452 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8453 });
8454}
8455
// Verifies that an unstaged diff (working copy vs. git index) recomputes its
// hunks when the index contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The version of the file currently staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The version of the file on disk (the working copy).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initially the buffer differs from the index by one added comment line
    // and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so the comment line is now staged; only the `println!`
    // line should remain as a difference between buffer and index.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8550
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// both HEAD and the index, including a file deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the `println!` change is staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The unstaged comment addition still has a secondary (index) hunk; the
    // staged `println!` change does not.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk no longer has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8742
// Verifies the hunk-staging state machine: staging a hunk optimistically marks
// it as pending (`SecondaryHunkRemovalPending`), the state settles to
// `NoSecondaryHunk` once the index write completes, and reverts to
// `HasSecondaryHunk` if the index write fails. Also checks the `DiffChanged`
// events emitted at each step.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD (and initially the index): one line deleted and two lines modified
    // relative to the working copy below.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the emitted sequence can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9092
9093#[gpui::test(seeds(340, 472))]
9094async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
9095 use DiffHunkSecondaryStatus::*;
9096 init_test(cx);
9097
9098 let committed_contents = r#"
9099 zero
9100 one
9101 two
9102 three
9103 four
9104 five
9105 "#
9106 .unindent();
9107 let file_contents = r#"
9108 one
9109 TWO
9110 three
9111 FOUR
9112 five
9113 "#
9114 .unindent();
9115
9116 let fs = FakeFs::new(cx.background_executor.clone());
9117 fs.insert_tree(
9118 "/dir",
9119 json!({
9120 ".git": {},
9121 "file.txt": file_contents.clone()
9122 }),
9123 )
9124 .await;
9125
9126 fs.set_head_for_repo(
9127 "/dir/.git".as_ref(),
9128 &[("file.txt", committed_contents.clone())],
9129 "deadbeef",
9130 );
9131 fs.set_index_for_repo(
9132 "/dir/.git".as_ref(),
9133 &[("file.txt", committed_contents.clone())],
9134 );
9135
9136 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9137
9138 let buffer = project
9139 .update(cx, |project, cx| {
9140 project.open_local_buffer("/dir/file.txt", cx)
9141 })
9142 .await
9143 .unwrap();
9144 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
9145 let uncommitted_diff = project
9146 .update(cx, |project, cx| {
9147 project.open_uncommitted_diff(buffer.clone(), cx)
9148 })
9149 .await
9150 .unwrap();
9151
9152 // The hunks are initially unstaged.
9153 uncommitted_diff.read_with(cx, |diff, cx| {
9154 assert_hunks(
9155 diff.snapshot(cx).hunks(&snapshot),
9156 &snapshot,
9157 &diff.base_text_string(cx).unwrap(),
9158 &[
9159 (
9160 0..0,
9161 "zero\n",
9162 "",
9163 DiffHunkStatus::deleted(HasSecondaryHunk),
9164 ),
9165 (
9166 1..2,
9167 "two\n",
9168 "TWO\n",
9169 DiffHunkStatus::modified(HasSecondaryHunk),
9170 ),
9171 (
9172 3..4,
9173 "four\n",
9174 "FOUR\n",
9175 DiffHunkStatus::modified(HasSecondaryHunk),
9176 ),
9177 ],
9178 );
9179 });
9180
9181 // Pause IO events
9182 fs.pause_events();
9183
9184 // Stage the first hunk.
9185 uncommitted_diff.update(cx, |diff, cx| {
9186 let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
9187 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9188 assert_hunks(
9189 diff.snapshot(cx).hunks(&snapshot),
9190 &snapshot,
9191 &diff.base_text_string(cx).unwrap(),
9192 &[
9193 (
9194 0..0,
9195 "zero\n",
9196 "",
9197 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9198 ),
9199 (
9200 1..2,
9201 "two\n",
9202 "TWO\n",
9203 DiffHunkStatus::modified(HasSecondaryHunk),
9204 ),
9205 (
9206 3..4,
9207 "four\n",
9208 "FOUR\n",
9209 DiffHunkStatus::modified(HasSecondaryHunk),
9210 ),
9211 ],
9212 );
9213 });
9214
9215 // Stage the second hunk *before* receiving the FS event for the first hunk.
9216 cx.run_until_parked();
9217 uncommitted_diff.update(cx, |diff, cx| {
9218 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
9219 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9220 assert_hunks(
9221 diff.snapshot(cx).hunks(&snapshot),
9222 &snapshot,
9223 &diff.base_text_string(cx).unwrap(),
9224 &[
9225 (
9226 0..0,
9227 "zero\n",
9228 "",
9229 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9230 ),
9231 (
9232 1..2,
9233 "two\n",
9234 "TWO\n",
9235 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9236 ),
9237 (
9238 3..4,
9239 "four\n",
9240 "FOUR\n",
9241 DiffHunkStatus::modified(HasSecondaryHunk),
9242 ),
9243 ],
9244 );
9245 });
9246
9247 // Process the FS event for staging the first hunk (second event is still pending).
9248 fs.flush_events(1);
9249 cx.run_until_parked();
9250
9251 // Stage the third hunk before receiving the second FS event.
9252 uncommitted_diff.update(cx, |diff, cx| {
9253 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
9254 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9255 });
9256
9257 // Wait for all remaining IO.
9258 cx.run_until_parked();
9259 fs.flush_events(fs.buffered_event_count());
9260
9261 // Now all hunks are staged.
9262 cx.run_until_parked();
9263 uncommitted_diff.update(cx, |diff, cx| {
9264 assert_hunks(
9265 diff.snapshot(cx).hunks(&snapshot),
9266 &snapshot,
9267 &diff.base_text_string(cx).unwrap(),
9268 &[
9269 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
9270 (
9271 1..2,
9272 "two\n",
9273 "TWO\n",
9274 DiffHunkStatus::modified(NoSecondaryHunk),
9275 ),
9276 (
9277 3..4,
9278 "four\n",
9279 "FOUR\n",
9280 DiffHunkStatus::modified(NoSecondaryHunk),
9281 ),
9282 ],
9283 );
9284 });
9285}
9286
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // yields in between, then verify the final secondary statuses match a
    // locally-maintained model. Operation count overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Thirty committed lines; every fifth line is modified in the working
    // copy, which yields six separate hunks. The index matches HEAD, so all
    // hunks start out unstaged.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the local model of each hunk's expected state.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Stage hunks that still have a secondary (unstaged) hunk; unstage
        // the rest. Record the optimistic pending status we expect to settle.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let pending IO interleave with subsequent operations in random ways.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status must resolve to its
    // terminal counterpart.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // Compare (row, secondary_status) pairs between the model and the diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9406
9407#[gpui::test]
9408async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9409 init_test(cx);
9410
9411 let committed_contents = r#"
9412 fn main() {
9413 println!("hello from HEAD");
9414 }
9415 "#
9416 .unindent();
9417 let file_contents = r#"
9418 fn main() {
9419 println!("hello from the working copy");
9420 }
9421 "#
9422 .unindent();
9423
9424 let fs = FakeFs::new(cx.background_executor.clone());
9425 fs.insert_tree(
9426 "/dir",
9427 json!({
9428 ".git": {},
9429 "src": {
9430 "main.rs": file_contents,
9431 }
9432 }),
9433 )
9434 .await;
9435
9436 fs.set_head_for_repo(
9437 Path::new("/dir/.git"),
9438 &[("src/main.rs", committed_contents.clone())],
9439 "deadbeef",
9440 );
9441 fs.set_index_for_repo(
9442 Path::new("/dir/.git"),
9443 &[("src/main.rs", committed_contents.clone())],
9444 );
9445
9446 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9447
9448 let buffer = project
9449 .update(cx, |project, cx| {
9450 project.open_local_buffer("/dir/src/main.rs", cx)
9451 })
9452 .await
9453 .unwrap();
9454 let uncommitted_diff = project
9455 .update(cx, |project, cx| {
9456 project.open_uncommitted_diff(buffer.clone(), cx)
9457 })
9458 .await
9459 .unwrap();
9460
9461 cx.run_until_parked();
9462 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9463 let snapshot = buffer.read(cx).snapshot();
9464 assert_hunks(
9465 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9466 &snapshot,
9467 &uncommitted_diff.base_text_string(cx).unwrap(),
9468 &[(
9469 1..2,
9470 " println!(\"hello from HEAD\");\n",
9471 " println!(\"hello from the working copy\");\n",
9472 DiffHunkStatus {
9473 kind: DiffHunkStatusKind::Modified,
9474 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9475 },
9476 )],
9477 );
9478 });
9479}
9480
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // This test drives a real on-disk git repository, so blocking IO must be
    // permitted on the test executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with mode 0755, then modify its contents in the worktree.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the content change; this writes a new index entry.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Inspect the real repository with the git CLI: staging must not have
    // rewritten the file mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // `git ls-files -s` prints the staged mode; it must still be 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9565
9566#[gpui::test]
9567async fn test_repository_and_path_for_project_path(
9568 background_executor: BackgroundExecutor,
9569 cx: &mut gpui::TestAppContext,
9570) {
9571 init_test(cx);
9572 let fs = FakeFs::new(background_executor);
9573 fs.insert_tree(
9574 path!("/root"),
9575 json!({
9576 "c.txt": "",
9577 "dir1": {
9578 ".git": {},
9579 "deps": {
9580 "dep1": {
9581 ".git": {},
9582 "src": {
9583 "a.txt": ""
9584 }
9585 }
9586 },
9587 "src": {
9588 "b.txt": ""
9589 }
9590 },
9591 }),
9592 )
9593 .await;
9594
9595 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9596 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9597 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9598 project
9599 .update(cx, |project, cx| project.git_scans_complete(cx))
9600 .await;
9601 cx.run_until_parked();
9602
9603 project.read_with(cx, |project, cx| {
9604 let git_store = project.git_store().read(cx);
9605 let pairs = [
9606 ("c.txt", None),
9607 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9608 (
9609 "dir1/deps/dep1/src/a.txt",
9610 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9611 ),
9612 ];
9613 let expected = pairs
9614 .iter()
9615 .map(|(path, result)| {
9616 (
9617 path,
9618 result.map(|(repo, repo_path)| {
9619 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9620 }),
9621 )
9622 })
9623 .collect::<Vec<_>>();
9624 let actual = pairs
9625 .iter()
9626 .map(|(path, _)| {
9627 let project_path = (tree_id, rel_path(path)).into();
9628 let result = maybe!({
9629 let (repo, repo_path) =
9630 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9631 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9632 });
9633 (path, result)
9634 })
9635 .collect::<Vec<_>>();
9636 pretty_assertions::assert_eq!(expected, actual);
9637 });
9638
9639 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9640 .await
9641 .unwrap();
9642 cx.run_until_parked();
9643
9644 project.read_with(cx, |project, cx| {
9645 let git_store = project.git_store().read(cx);
9646 assert_eq!(
9647 git_store.repository_and_path_for_project_path(
9648 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9649 cx
9650 ),
9651 None
9652 );
9653 });
9654}
9655
9656#[gpui::test]
9657async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9658 init_test(cx);
9659 let fs = FakeFs::new(cx.background_executor.clone());
9660 let home = paths::home_dir();
9661 fs.insert_tree(
9662 home,
9663 json!({
9664 ".git": {},
9665 "project": {
9666 "a.txt": "A"
9667 },
9668 }),
9669 )
9670 .await;
9671
9672 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9673 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9674 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9675
9676 project
9677 .update(cx, |project, cx| project.git_scans_complete(cx))
9678 .await;
9679 tree.flush_fs_events(cx).await;
9680
9681 project.read_with(cx, |project, cx| {
9682 let containing = project
9683 .git_store()
9684 .read(cx)
9685 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9686 assert!(containing.is_none());
9687 });
9688
9689 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9690 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9691 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9692 project
9693 .update(cx, |project, cx| project.git_scans_complete(cx))
9694 .await;
9695 tree.flush_fs_events(cx).await;
9696
9697 project.read_with(cx, |project, cx| {
9698 let containing = project
9699 .git_store()
9700 .read(cx)
9701 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9702 assert_eq!(
9703 containing
9704 .unwrap()
9705 .0
9706 .read(cx)
9707 .work_directory_abs_path
9708 .as_ref(),
9709 home,
9710 );
9711 });
9712}
9713
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real on-disk repository; blocking IO must be permitted.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // After the commit: delete d.txt and modify a.txt in the worktree;
    // b.txt was never added, so it remains untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear as
    // modified in the cached status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the staged changes (and the d.txt removal), then delete a.txt
    // (tracked) and b.txt (untracked) from the worktree.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9869
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real on-disk repository; blocking IO must be permitted.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9934
9935#[track_caller]
9936/// We merge lhs into rhs.
9937fn merge_pending_ops_snapshots(
9938 source: Vec<pending_op::PendingOps>,
9939 mut target: Vec<pending_op::PendingOps>,
9940) -> Vec<pending_op::PendingOps> {
9941 for s_ops in source {
9942 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9943 if ops.repo_path == s_ops.repo_path {
9944 Some(idx)
9945 } else {
9946 None
9947 }
9948 }) {
9949 let t_ops = &mut target[idx];
9950 for s_op in s_ops.ops {
9951 if let Some(op_idx) = t_ops
9952 .ops
9953 .iter()
9954 .zip(0..)
9955 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9956 {
9957 let t_op = &mut t_ops.ops[op_idx];
9958 match (s_op.job_status, t_op.job_status) {
9959 (pending_op::JobStatus::Running, _) => {}
9960 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9961 (s_st, t_st) if s_st == t_st => {}
9962 _ => unreachable!(),
9963 }
9964 } else {
9965 t_ops.ops.push(s_op);
9966 }
9967 }
9968 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9969 } else {
9970 target.push(s_ops);
9971 }
9972 }
9973 target
9974}
9975
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the full history of ops at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Pending-op ids are issued sequentially starting at 1.
    let mut id = 1u16;

    // Stage or unstage `path` and assert the op is first observed as Running,
    // then as Finished once the returned task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged history contains all five ops, each Finished, in id order.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last operation: staged (added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10140
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the full history of ops at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Issue the same staging request twice without awaiting the first.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first (superseded) op is recorded as Skipped; the second Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (added) regardless of the skipped duplicate.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10250
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so we
    // can assert on the full history of ops at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a single entry, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: explicit stage (1), then unstage-all (2). stage_all produced no
    // separate op for it since it was already staged.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (1), then unstage-all (2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10381
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that when only a deeply-nested subfolder of a repository is opened
    // as the worktree root, the repository is still discovered at its true root
    // and statuses are reported for repo-relative paths.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths relative to the repository root (/root/my-repo), not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the inner sub-folder as the project's worktree.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory should be the actual repo root, two levels above
        // the opened worktree.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should be reflected after a rescan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10461
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Exercises merge-conflict tracking across a conflicted cherry-pick and its
    // manual resolution, using a real git repository on disk.
    // NOTE: `#[cfg(any())]` above compiles this test out entirely (see the TODO).
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    // Initial commit of a.txt on the default branch.
    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch that rewrites a.txt...
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // ...then change the same file differently on main and cherry-pick the
    // divergent commit, which must produce a conflict.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // CHERRY_PICK_HEAD existing confirms git left the cherry-pick in progress.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no merge conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10544
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that editing a .gitignore at runtime updates both the worktree's
    // ignored flags and the repository's per-path statuses.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree with the working copy for the tracked files, so
    // a.xml starts out unmodified and b.txt is only excluded by the ignore rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has flipped: a.xml is now ignored, while b.txt shows as
    // Added because it was staged above.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10612
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a repository's work directory on disk updates the
    // repository's `work_directory_abs_path` while per-path statuses survive.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it in the working copy; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, "a" is modified, "b" is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the entire work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should track the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10694
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status tracking against a real on-disk git repository: initial
    // scan, working-copy edits, commits, reset/stash, ignore-rule changes, and
    // directory renames must all be reflected in the repository's statuses.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status entries.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory with an untracked file in it.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status must follow the file to
    // its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10919
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    // Simulates a flycheck-style burst of FS activity inside an ignored `target`
    // directory and asserts which repository/project events it may produce.
    // NOTE(review): this test is `#[ignore]`d; the reason is not recorded here.
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change for later
    // assertions about which events fire.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // Drop the synthetic sentinel entry used by FS-event flushing.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces those entries to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate flycheck churn: create a deps dir, drop a temp file in it, then
    // remove the whole dir again, flushing events after each step.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11078
11079// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11080// to different timings/ordering of events.
11081#[ignore]
11082#[gpui::test]
11083async fn test_odd_events_for_ignored_dirs(
11084 executor: BackgroundExecutor,
11085 cx: &mut gpui::TestAppContext,
11086) {
11087 init_test(cx);
11088 let fs = FakeFs::new(executor);
11089 fs.insert_tree(
11090 path!("/root"),
11091 json!({
11092 ".git": {},
11093 ".gitignore": "**/target/",
11094 "src": {
11095 "main.rs": "fn main() {}",
11096 },
11097 "target": {
11098 "debug": {
11099 "foo.txt": "foo",
11100 "deps": {}
11101 }
11102 }
11103 }),
11104 )
11105 .await;
11106 fs.set_head_and_index_for_repo(
11107 path!("/root/.git").as_ref(),
11108 &[
11109 (".gitignore", "**/target/".into()),
11110 ("src/main.rs", "fn main() {}".into()),
11111 ],
11112 );
11113
11114 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11115 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11116 let project_events = Arc::new(Mutex::new(Vec::new()));
11117 project.update(cx, |project, cx| {
11118 let repository_updates = repository_updates.clone();
11119 cx.subscribe(project.git_store(), move |_, _, e, _| {
11120 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11121 repository_updates.lock().push(e.clone());
11122 }
11123 })
11124 .detach();
11125 let project_events = project_events.clone();
11126 cx.subscribe_self(move |_, e, _| {
11127 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11128 project_events.lock().extend(
11129 updates
11130 .iter()
11131 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11132 .filter(|(path, _)| path != "fs-event-sentinel"),
11133 );
11134 }
11135 })
11136 .detach();
11137 });
11138
11139 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11140 tree.update(cx, |tree, cx| {
11141 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11142 })
11143 .await
11144 .unwrap();
11145 tree.flush_fs_events(cx).await;
11146 project
11147 .update(cx, |project, cx| project.git_scans_complete(cx))
11148 .await;
11149 cx.run_until_parked();
11150 tree.update(cx, |tree, _| {
11151 assert_eq!(
11152 tree.entries(true, 0)
11153 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11154 .collect::<Vec<_>>(),
11155 vec![
11156 (rel_path(""), false),
11157 (rel_path(".gitignore"), false),
11158 (rel_path("src"), false),
11159 (rel_path("src/main.rs"), false),
11160 (rel_path("target"), true),
11161 (rel_path("target/debug"), true),
11162 (rel_path("target/debug/deps"), true),
11163 (rel_path("target/debug/foo.txt"), true),
11164 ]
11165 );
11166 });
11167
11168 assert_eq!(
11169 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11170 vec![
11171 RepositoryEvent::HeadChanged,
11172 RepositoryEvent::StatusesChanged,
11173 RepositoryEvent::StatusesChanged,
11174 ],
11175 "Initial worktree scan should produce a repo update event"
11176 );
11177 assert_eq!(
11178 project_events.lock().drain(..).collect::<Vec<_>>(),
11179 vec![
11180 ("target".to_string(), PathChange::Loaded),
11181 ("target/debug".to_string(), PathChange::Loaded),
11182 ("target/debug/deps".to_string(), PathChange::Loaded),
11183 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11184 ],
11185 "All non-ignored entries and all opened firs should be getting a project event",
11186 );
11187
11188 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11189 // This may happen multiple times during a single flycheck, but once is enough for testing.
11190 fs.emit_fs_event("/root/target/debug/deps", None);
11191 tree.flush_fs_events(cx).await;
11192 project
11193 .update(cx, |project, cx| project.git_scans_complete(cx))
11194 .await;
11195 cx.executor().run_until_parked();
11196
11197 assert_eq!(
11198 repository_updates
11199 .lock()
11200 .iter()
11201 .cloned()
11202 .collect::<Vec<_>>(),
11203 Vec::new(),
11204 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11205 );
11206 assert_eq!(
11207 project_events.lock().as_slice(),
11208 Vec::new(),
11209 "No further project events should happen, as only ignored dirs received FS events",
11210 );
11211}
11212
11213#[gpui::test]
11214async fn test_repos_in_invisible_worktrees(
11215 executor: BackgroundExecutor,
11216 cx: &mut gpui::TestAppContext,
11217) {
11218 init_test(cx);
11219 let fs = FakeFs::new(executor);
11220 fs.insert_tree(
11221 path!("/root"),
11222 json!({
11223 "dir1": {
11224 ".git": {},
11225 "dep1": {
11226 ".git": {},
11227 "src": {
11228 "a.txt": "",
11229 },
11230 },
11231 "b.txt": "",
11232 },
11233 }),
11234 )
11235 .await;
11236
11237 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
11238 let _visible_worktree =
11239 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11240 project
11241 .update(cx, |project, cx| project.git_scans_complete(cx))
11242 .await;
11243
11244 let repos = project.read_with(cx, |project, cx| {
11245 project
11246 .repositories(cx)
11247 .values()
11248 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11249 .collect::<Vec<_>>()
11250 });
11251 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11252
11253 let (_invisible_worktree, _) = project
11254 .update(cx, |project, cx| {
11255 project.worktree_store().update(cx, |worktree_store, cx| {
11256 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
11257 })
11258 })
11259 .await
11260 .expect("failed to create worktree");
11261 project
11262 .update(cx, |project, cx| project.git_scans_complete(cx))
11263 .await;
11264
11265 let repos = project.read_with(cx, |project, cx| {
11266 project
11267 .repositories(cx)
11268 .values()
11269 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11270 .collect::<Vec<_>>()
11271 });
11272 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11273}
11274
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies ignore handling across rescans: files matched by an ancestor
    // .gitignore (outside the repo) vs. the repo's own .gitignore, and how
    // newly created files pick up ignored/staged state.
    init_test(cx);
    // Clear file-scan exclusions so the `.git` dir itself shows up as an entry.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so their state can be checked.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: the tracked file is clean; the ancestor-ignored file is
    // outside the repo's ignore rules (so not ignored from the repo's view);
    // the repo-ignored file is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new ancestor-ignored and
    // repo-ignored files.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged file shows as Added; the others mirror the initial pattern.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11415
/// Verifies that a linked git worktree and a git submodule nested inside a
/// project are each surfaced as their own repository, and that git state
/// changes inside them are picked up and reflected in file statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Layout: a primary repo at /project, a linked worktree at
    // /project/some-worktree (".git" file pointing at .git/worktrees/...),
    // and a submodule at /project/subdir/some-submodule (".git" file
    // pointing at .git/modules/...).
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (primary, linked worktree, submodule) should be
    // discovered by the scan.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // The buffer should resolve to the linked worktree's repository, whose
        // original repo lives at /project.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk says "B", so the file must show
    // as modified in the worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // Unlike the linked worktree, a submodule's original repo path is its
        // own working directory and it is not a linked worktree.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11587
11588#[gpui::test]
11589async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
11590 init_test(cx);
11591 let fs = FakeFs::new(cx.background_executor.clone());
11592 fs.insert_tree(
11593 path!("/root"),
11594 json!({
11595 "project": {
11596 ".git": {},
11597 "child1": {
11598 "a.txt": "A",
11599 },
11600 "child2": {
11601 "b.txt": "B",
11602 }
11603 }
11604 }),
11605 )
11606 .await;
11607
11608 let project = Project::test(
11609 fs.clone(),
11610 [
11611 path!("/root/project/child1").as_ref(),
11612 path!("/root/project/child2").as_ref(),
11613 ],
11614 cx,
11615 )
11616 .await;
11617
11618 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11619 tree.flush_fs_events(cx).await;
11620 project
11621 .update(cx, |project, cx| project.git_scans_complete(cx))
11622 .await;
11623 cx.executor().run_until_parked();
11624
11625 let repos = project.read_with(cx, |project, cx| {
11626 project
11627 .repositories(cx)
11628 .values()
11629 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11630 .collect::<Vec<_>>()
11631 });
11632 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11633}
11634
/// Verifies that when a buffer is saved under a new path, its unstaged and
/// uncommitted diffs are re-based against the new path's staged and committed
/// contents (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each combination of file and git layer so the
    // assertions below can tell exactly which base text the diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the rename, should likewise compare
    // against file_2's committed (HEAD) contents.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11748
11749async fn search(
11750 project: &Entity<Project>,
11751 query: SearchQuery,
11752 cx: &mut gpui::TestAppContext,
11753) -> Result<HashMap<String, Vec<Range<usize>>>> {
11754 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11755 let mut results = HashMap::default();
11756 while let Ok(search_result) = search_rx.rx.recv().await {
11757 match search_result {
11758 SearchResult::Buffer { buffer, ranges } => {
11759 results.entry(buffer).or_insert(ranges);
11760 }
11761 SearchResult::LimitReached => {}
11762 }
11763 }
11764 Ok(results
11765 .into_iter()
11766 .map(|(buffer, ranges)| {
11767 buffer.update(cx, |buffer, cx| {
11768 let path = buffer
11769 .file()
11770 .unwrap()
11771 .full_path(cx)
11772 .to_string_lossy()
11773 .to_string();
11774 let ranges = ranges
11775 .into_iter()
11776 .map(|range| range.to_offset(buffer))
11777 .collect::<Vec<_>>();
11778 (path, ranges)
11779 })
11780 })
11781 .collect())
11782}
11783
/// Verifies that reloading a buffer with a different encoding is undoable and
/// redoable, restoring both the decoded text and the recorded encoding, and
/// that none of these transitions mark the buffer dirty.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    // Re-read the same bytes from disk, decoding them as UTF-16LE this time.
    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 read little-endian is the single code unit 0x6948 ("楈").
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the original text and the original encoding.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo reapplies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11847
/// Verifies that `wait_for_initial_scan` resolves only after the worktree
/// store's initial scan has completed, and that both repositories present in
/// the opened worktrees have been created by that point.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Record every Repository entity created, so we can count how many were
    // constructed during the scan. Registered before the project is created so
    // no creation is missed.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    // One repository per worktree (/root/a and /root/b each have a .git dir).
    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
11918
11919pub fn init_test(cx: &mut gpui::TestAppContext) {
11920 zlog::init_test();
11921
11922 cx.update(|cx| {
11923 let settings_store = SettingsStore::test(cx);
11924 cx.set_global(settings_store);
11925 release_channel::init(semver::Version::new(0, 0, 0), cx);
11926 });
11927}
11928
11929fn json_lang() -> Arc<Language> {
11930 Arc::new(Language::new(
11931 LanguageConfig {
11932 name: "JSON".into(),
11933 matcher: LanguageMatcher {
11934 path_suffixes: vec!["json".to_string()],
11935 ..Default::default()
11936 },
11937 ..Default::default()
11938 },
11939 None,
11940 ))
11941}
11942
11943fn js_lang() -> Arc<Language> {
11944 Arc::new(Language::new(
11945 LanguageConfig {
11946 name: "JavaScript".into(),
11947 matcher: LanguageMatcher {
11948 path_suffixes: vec!["js".to_string()],
11949 ..Default::default()
11950 },
11951 ..Default::default()
11952 },
11953 None,
11954 ))
11955}
11956
/// A Python language definition (no grammar) wired up with a fake toolchain
/// lister that reports a "Python Venv" toolchain for every `.venv` directory
/// found in the queried path's ancestor directories on the given fake fs.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution by path is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for these tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12029
12030fn typescript_lang() -> Arc<Language> {
12031 Arc::new(Language::new(
12032 LanguageConfig {
12033 name: "TypeScript".into(),
12034 matcher: LanguageMatcher {
12035 path_suffixes: vec!["ts".to_string()],
12036 ..Default::default()
12037 },
12038 ..Default::default()
12039 },
12040 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12041 ))
12042}
12043
12044fn tsx_lang() -> Arc<Language> {
12045 Arc::new(Language::new(
12046 LanguageConfig {
12047 name: "tsx".into(),
12048 matcher: LanguageMatcher {
12049 path_suffixes: vec!["tsx".to_string()],
12050 ..Default::default()
12051 },
12052 ..Default::default()
12053 },
12054 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12055 ))
12056}
12057
12058fn get_all_tasks(
12059 project: &Entity<Project>,
12060 task_contexts: Arc<TaskContexts>,
12061 cx: &mut App,
12062) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12063 let new_tasks = project.update(cx, |project, cx| {
12064 project.task_store().update(cx, |task_store, cx| {
12065 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12066 this.used_and_current_resolved_tasks(task_contexts, cx)
12067 })
12068 })
12069 });
12070
12071 cx.background_spawn(async move {
12072 let (mut old, new) = new_tasks.await;
12073 old.extend(new);
12074 old
12075 })
12076}
12077
12078#[track_caller]
12079fn assert_entry_git_state(
12080 tree: &Worktree,
12081 repository: &Repository,
12082 path: &str,
12083 index_status: Option<StatusCode>,
12084 is_ignored: bool,
12085) {
12086 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12087 let entry = tree
12088 .entry_for_path(&rel_path(path))
12089 .unwrap_or_else(|| panic!("entry {path} not found"));
12090 let status = repository
12091 .status_for_path(&repo_path(path))
12092 .map(|entry| entry.status);
12093 let expected = index_status.map(|index_status| {
12094 TrackedStatus {
12095 index_status,
12096 worktree_status: StatusCode::Unmodified,
12097 }
12098 .into()
12099 });
12100 assert_eq!(
12101 status, expected,
12102 "expected {path} to have git status: {expected:?}"
12103 );
12104 assert_eq!(
12105 entry.is_ignored, is_ignored,
12106 "expected {path} to have is_ignored: {is_ignored}"
12107 );
12108}
12109
12110#[track_caller]
12111fn git_init(path: &Path) -> git2::Repository {
12112 let mut init_opts = RepositoryInitOptions::new();
12113 init_opts.initial_head("main");
12114 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12115}
12116
12117#[track_caller]
12118fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12119 let path = path.as_ref();
12120 let mut index = repo.index().expect("Failed to get index");
12121 index.add_path(path).expect("Failed to add file");
12122 index.write().expect("Failed to write index");
12123}
12124
12125#[track_caller]
12126fn git_remove_index(path: &Path, repo: &git2::Repository) {
12127 let mut index = repo.index().expect("Failed to get index");
12128 index.remove_path(path).expect("Failed to add file");
12129 index.write().expect("Failed to write index");
12130}
12131
12132#[track_caller]
12133fn git_commit(msg: &'static str, repo: &git2::Repository) {
12134 use git2::Signature;
12135
12136 let signature = Signature::now("test", "test@zed.dev").unwrap();
12137 let oid = repo.index().unwrap().write_tree().unwrap();
12138 let tree = repo.find_tree(oid).unwrap();
12139 if let Ok(head) = repo.head() {
12140 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12141
12142 let parent_commit = parent_obj.as_commit().unwrap();
12143
12144 repo.commit(
12145 Some("HEAD"),
12146 &signature,
12147 &signature,
12148 msg,
12149 &tree,
12150 &[parent_commit],
12151 )
12152 .expect("Failed to commit with parent");
12153 } else {
12154 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12155 .expect("Failed to commit");
12156 }
12157}
12158
/// Cherry-picks `commit` onto the current HEAD.
/// NOTE: compiled out via `#[cfg(any())]`; kept for future use.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12164
12165#[track_caller]
12166fn git_stash(repo: &mut git2::Repository) {
12167 use git2::Signature;
12168
12169 let signature = Signature::now("test", "test@zed.dev").unwrap();
12170 repo.stash_save(&signature, "N/A", None)
12171 .expect("Failed to stash");
12172}
12173
12174#[track_caller]
12175fn git_reset(offset: usize, repo: &git2::Repository) {
12176 let head = repo.head().expect("Couldn't get repo head");
12177 let object = head.peel(git2::ObjectType::Commit).unwrap();
12178 let commit = object.as_commit().unwrap();
12179 let new_head = commit
12180 .parents()
12181 .inspect(|parnet| {
12182 parnet.message();
12183 })
12184 .nth(offset)
12185 .expect("Not enough history");
12186 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12187 .expect("Could not reset");
12188}
12189
/// Creates branch `name` pointing at the current HEAD commit.
/// NOTE: compiled out via `#[cfg(any())]`; kept for future use.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously said "Failed to commit", which mislabels
    // a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12200
/// Points HEAD at the ref `name` and checks out its tree.
/// NOTE: compiled out via `#[cfg(any())]`; kept for future use.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12207
/// Returns the repository's status entries as a map from repo-relative path
/// to git2 status flags.
/// NOTE: compiled out via `#[cfg(any())]`; kept for future use.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
12217
/// Verifies `Project::find_project_path` behavior for absolute paths: it maps
/// paths inside either worktree (including not-yet-existing files) to the
/// right worktree/relative-path pair, and returns `None` for paths outside
/// all worktrees.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the lookups below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a multi-component relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves against that worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if the file doesn't exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12301
/// Verifies repository bookkeeping as worktrees are removed: removing a
/// worktree that shares a repository with another keeps the repository alive,
/// the active repository falls back to a remaining one when its worktree is
/// removed, and it becomes `None` once all worktrees are gone.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees, but /root/b/script lives inside the /root/b repo, so
    // only two distinct repositories exist.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested /root/b/script worktree must not drop the /root/b
    // repository, since the /root/b worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree backing the active repository should switch the
    // active repository to the remaining one.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree removed there is no active repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12414
// Verifies optimistic UI updates for diff hunks while a stage operation is in
// flight: the single modified hunk should transition
// HasSecondaryHunk -> SecondaryHunkRemovalPending (job running) ->
// NoSecondaryHunk (staging complete), and disappear entirely once HEAD is
// updated to match the working copy.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD/index contents ("two") vs. working-copy contents ("TWO") — one
    // modified hunk on line 1.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    // The loop tolerates a few ticks of HasSecondaryHunk before the pending
    // state appears, and breaks as soon as it does.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            // Jumping straight to NoSecondaryHunk would mean the optimistic
            // pending state was skipped entirely.
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight: the hunk must show the optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12559
12560#[gpui::test]
12561async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12562 init_test(cx);
12563
12564 // Configure read_only_files setting
12565 cx.update(|cx| {
12566 cx.update_global::<SettingsStore, _>(|store, cx| {
12567 store.update_user_settings(cx, |settings| {
12568 settings.project.worktree.read_only_files = Some(vec![
12569 "**/generated/**".to_string(),
12570 "**/*.gen.rs".to_string(),
12571 ]);
12572 });
12573 });
12574 });
12575
12576 let fs = FakeFs::new(cx.background_executor.clone());
12577 fs.insert_tree(
12578 path!("/root"),
12579 json!({
12580 "src": {
12581 "main.rs": "fn main() {}",
12582 "types.gen.rs": "// Generated file",
12583 },
12584 "generated": {
12585 "schema.rs": "// Auto-generated schema",
12586 }
12587 }),
12588 )
12589 .await;
12590
12591 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12592
12593 // Open a regular file - should be read-write
12594 let regular_buffer = project
12595 .update(cx, |project, cx| {
12596 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12597 })
12598 .await
12599 .unwrap();
12600
12601 regular_buffer.read_with(cx, |buffer, _| {
12602 assert!(!buffer.read_only(), "Regular file should not be read-only");
12603 });
12604
12605 // Open a file matching *.gen.rs pattern - should be read-only
12606 let gen_buffer = project
12607 .update(cx, |project, cx| {
12608 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12609 })
12610 .await
12611 .unwrap();
12612
12613 gen_buffer.read_with(cx, |buffer, _| {
12614 assert!(
12615 buffer.read_only(),
12616 "File matching *.gen.rs pattern should be read-only"
12617 );
12618 });
12619
12620 // Open a file in generated directory - should be read-only
12621 let generated_buffer = project
12622 .update(cx, |project, cx| {
12623 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12624 })
12625 .await
12626 .unwrap();
12627
12628 generated_buffer.read_with(cx, |buffer, _| {
12629 assert!(
12630 buffer.read_only(),
12631 "File in generated directory should be read-only"
12632 );
12633 });
12634}
12635
12636#[gpui::test]
12637async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12638 init_test(cx);
12639
12640 // Explicitly set read_only_files to empty (default behavior)
12641 cx.update(|cx| {
12642 cx.update_global::<SettingsStore, _>(|store, cx| {
12643 store.update_user_settings(cx, |settings| {
12644 settings.project.worktree.read_only_files = Some(vec![]);
12645 });
12646 });
12647 });
12648
12649 let fs = FakeFs::new(cx.background_executor.clone());
12650 fs.insert_tree(
12651 path!("/root"),
12652 json!({
12653 "src": {
12654 "main.rs": "fn main() {}",
12655 },
12656 "generated": {
12657 "schema.rs": "// Auto-generated schema",
12658 }
12659 }),
12660 )
12661 .await;
12662
12663 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12664
12665 // All files should be read-write when read_only_files is empty
12666 let main_buffer = project
12667 .update(cx, |project, cx| {
12668 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12669 })
12670 .await
12671 .unwrap();
12672
12673 main_buffer.read_with(cx, |buffer, _| {
12674 assert!(
12675 !buffer.read_only(),
12676 "Files should not be read-only when read_only_files is empty"
12677 );
12678 });
12679
12680 let generated_buffer = project
12681 .update(cx, |project, cx| {
12682 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12683 })
12684 .await
12685 .unwrap();
12686
12687 generated_buffer.read_with(cx, |buffer, _| {
12688 assert!(
12689 !buffer.read_only(),
12690 "Generated files should not be read-only when read_only_files is empty"
12691 );
12692 });
12693}
12694
12695#[gpui::test]
12696async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12697 init_test(cx);
12698
12699 // Configure to make lock files read-only
12700 cx.update(|cx| {
12701 cx.update_global::<SettingsStore, _>(|store, cx| {
12702 store.update_user_settings(cx, |settings| {
12703 settings.project.worktree.read_only_files = Some(vec![
12704 "**/*.lock".to_string(),
12705 "**/package-lock.json".to_string(),
12706 ]);
12707 });
12708 });
12709 });
12710
12711 let fs = FakeFs::new(cx.background_executor.clone());
12712 fs.insert_tree(
12713 path!("/root"),
12714 json!({
12715 "Cargo.lock": "# Lock file",
12716 "Cargo.toml": "[package]",
12717 "package-lock.json": "{}",
12718 "package.json": "{}",
12719 }),
12720 )
12721 .await;
12722
12723 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12724
12725 // Cargo.lock should be read-only
12726 let cargo_lock = project
12727 .update(cx, |project, cx| {
12728 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12729 })
12730 .await
12731 .unwrap();
12732
12733 cargo_lock.read_with(cx, |buffer, _| {
12734 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12735 });
12736
12737 // Cargo.toml should be read-write
12738 let cargo_toml = project
12739 .update(cx, |project, cx| {
12740 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12741 })
12742 .await
12743 .unwrap();
12744
12745 cargo_toml.read_with(cx, |buffer, _| {
12746 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12747 });
12748
12749 // package-lock.json should be read-only
12750 let package_lock = project
12751 .update(cx, |project, cx| {
12752 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12753 })
12754 .await
12755 .unwrap();
12756
12757 package_lock.read_with(cx, |buffer, _| {
12758 assert!(buffer.read_only(), "package-lock.json should be read-only");
12759 });
12760
12761 // package.json should be read-write
12762 let package_json = project
12763 .update(cx, |project, cx| {
12764 project.open_local_buffer(path!("/root/package.json"), cx)
12765 })
12766 .await
12767 .unwrap();
12768
12769 package_json.read_with(cx, |buffer, _| {
12770 assert!(!buffer.read_only(), "package.json should not be read-only");
12771 });
12772}
12773
12774mod disable_ai_settings_tests {
12775 use gpui::TestAppContext;
12776 use project::*;
12777 use settings::{Settings, SettingsStore};
12778
12779 #[gpui::test]
12780 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12781 cx.update(|cx| {
12782 settings::init(cx);
12783
12784 // Test 1: Default is false (AI enabled)
12785 assert!(
12786 !DisableAiSettings::get_global(cx).disable_ai,
12787 "Default should allow AI"
12788 );
12789 });
12790
12791 let disable_true = serde_json::json!({
12792 "disable_ai": true
12793 })
12794 .to_string();
12795 let disable_false = serde_json::json!({
12796 "disable_ai": false
12797 })
12798 .to_string();
12799
12800 cx.update_global::<SettingsStore, _>(|store, cx| {
12801 store.set_user_settings(&disable_false, cx).unwrap();
12802 store.set_global_settings(&disable_true, cx).unwrap();
12803 });
12804 cx.update(|cx| {
12805 assert!(
12806 DisableAiSettings::get_global(cx).disable_ai,
12807 "Local false cannot override global true"
12808 );
12809 });
12810
12811 cx.update_global::<SettingsStore, _>(|store, cx| {
12812 store.set_global_settings(&disable_false, cx).unwrap();
12813 store.set_user_settings(&disable_true, cx).unwrap();
12814 });
12815
12816 cx.update(|cx| {
12817 assert!(
12818 DisableAiSettings::get_global(cx).disable_ai,
12819 "Local false cannot override global true"
12820 );
12821 });
12822 }
12823
12824 #[gpui::test]
12825 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12826 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12827 use worktree::WorktreeId;
12828
12829 cx.update(|cx| {
12830 settings::init(cx);
12831
12832 // Default should allow AI
12833 assert!(
12834 !DisableAiSettings::get_global(cx).disable_ai,
12835 "Default should allow AI"
12836 );
12837 });
12838
12839 let worktree_id = WorktreeId::from_usize(1);
12840 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12841 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12842 };
12843 let project_path = rel_path("project");
12844 let settings_location = SettingsLocation {
12845 worktree_id,
12846 path: project_path.as_ref(),
12847 };
12848
12849 // Test: Project-level disable_ai=true should disable AI for files in that project
12850 cx.update_global::<SettingsStore, _>(|store, cx| {
12851 store
12852 .set_local_settings(
12853 worktree_id,
12854 LocalSettingsPath::InWorktree(project_path.clone()),
12855 LocalSettingsKind::Settings,
12856 Some(r#"{ "disable_ai": true }"#),
12857 cx,
12858 )
12859 .unwrap();
12860 });
12861
12862 cx.update(|cx| {
12863 let settings = DisableAiSettings::get(Some(settings_location), cx);
12864 assert!(
12865 settings.disable_ai,
12866 "Project-level disable_ai=true should disable AI for files in that project"
12867 );
12868 // Global should now also be true since project-level disable_ai is merged into global
12869 assert!(
12870 DisableAiSettings::get_global(cx).disable_ai,
12871 "Global setting should be affected by project-level disable_ai=true"
12872 );
12873 });
12874
12875 // Test: Setting project-level to false should allow AI for that project
12876 cx.update_global::<SettingsStore, _>(|store, cx| {
12877 store
12878 .set_local_settings(
12879 worktree_id,
12880 LocalSettingsPath::InWorktree(project_path.clone()),
12881 LocalSettingsKind::Settings,
12882 Some(r#"{ "disable_ai": false }"#),
12883 cx,
12884 )
12885 .unwrap();
12886 });
12887
12888 cx.update(|cx| {
12889 let settings = DisableAiSettings::get(Some(settings_location), cx);
12890 assert!(
12891 !settings.disable_ai,
12892 "Project-level disable_ai=false should allow AI"
12893 );
12894 // Global should also be false now
12895 assert!(
12896 !DisableAiSettings::get_global(cx).disable_ai,
12897 "Global setting should be false when project-level is false"
12898 );
12899 });
12900
12901 // Test: User-level true + project-level false = AI disabled (saturation)
12902 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12903 cx.update_global::<SettingsStore, _>(|store, cx| {
12904 store.set_user_settings(&disable_true, cx).unwrap();
12905 store
12906 .set_local_settings(
12907 worktree_id,
12908 LocalSettingsPath::InWorktree(project_path.clone()),
12909 LocalSettingsKind::Settings,
12910 Some(r#"{ "disable_ai": false }"#),
12911 cx,
12912 )
12913 .unwrap();
12914 });
12915
12916 cx.update(|cx| {
12917 let settings = DisableAiSettings::get(Some(settings_location), cx);
12918 assert!(
12919 settings.disable_ai,
12920 "Project-level false cannot override user-level true (SaturatingBool)"
12921 );
12922 });
12923 }
12924}