1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
45 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
46 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
47 ToolchainMetadata,
48 language_settings::{Formatter, FormatterList, LanguageSettings, LanguageSettingsContent},
49 markdown_lang, rust_lang, tree_sitter_typescript,
50};
51use lsp::{
52 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
53 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
54 Uri, WillRenameFiles, notification::DidRenameFiles,
55};
56use parking_lot::Mutex;
57use paths::{config_dir, global_gitignore_path, tasks_file};
58use postage::stream::Stream as _;
59use pretty_assertions::{assert_eq, assert_matches};
60use project::{
61 Event, TaskContexts,
62 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
63 search::{SearchQuery, SearchResult},
64 task_store::{TaskSettingsLocation, TaskStore},
65 *,
66};
67use rand::{Rng as _, rngs::StdRng};
68use serde_json::json;
69use settings::SettingsStore;
70#[cfg(not(windows))]
71use std::os;
72use std::{
73 cell::RefCell,
74 env, mem,
75 num::NonZeroU32,
76 ops::Range,
77 path::{Path, PathBuf},
78 rc::Rc,
79 str::FromStr,
80 sync::{Arc, OnceLock, atomic},
81 task::Poll,
82 time::Duration,
83};
84use sum_tree::SumTree;
85use task::{ResolvedTask, ShellKind, TaskContext};
86use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
87use unindent::Unindent as _;
88use util::{
89 TryFutureExt as _, assert_set_eq, maybe, path,
90 paths::{PathMatcher, PathStyle},
91 rel_path::{RelPath, rel_path},
92 test::{TempTree, marked_text_offsets},
93 uri,
94};
95use worktree::WorktreeModelHandle as _;
96
97#[gpui::test]
98async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
99 cx.executor().allow_parking();
100
101 let (tx, mut rx) = futures::channel::mpsc::unbounded();
102 let _thread = std::thread::spawn(move || {
103 #[cfg(not(target_os = "windows"))]
104 std::fs::metadata("/tmp").unwrap();
105 #[cfg(target_os = "windows")]
106 std::fs::metadata("C:/Windows").unwrap();
107 std::thread::sleep(Duration::from_millis(1000));
108 tx.unbounded_send(1).unwrap();
109 });
110 rx.next().await.unwrap();
111}
112
113#[gpui::test]
114async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
115 cx.executor().allow_parking();
116
117 let io_task = smol::unblock(move || {
118 println!("sleeping on thread {:?}", std::thread::current().id());
119 std::thread::sleep(Duration::from_millis(10));
120 1
121 });
122
123 let task = cx.foreground_executor().spawn(async move {
124 io_task.await;
125 });
126
127 task.await;
128}
129
130#[gpui::test]
131async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
132 cx: &mut gpui::TestAppContext,
133) {
134 init_test(cx);
135
136 let fs = FakeFs::new(cx.executor());
137 fs.insert_tree(
138 path!("/root"),
139 json!({
140 "dir-project": {
141 "src": {
142 "main.rs": "fn main() {}"
143 }
144 },
145 "single-file.rs": "fn helper() {}"
146 }),
147 )
148 .await;
149
150 let project = Project::test(
151 fs,
152 [
153 Path::new(path!("/root/single-file.rs")),
154 Path::new(path!("/root/dir-project")),
155 ],
156 cx,
157 )
158 .await;
159
160 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
161 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
162
163 assert_eq!(
164 ordered_paths,
165 vec![
166 PathBuf::from(path!("/root/dir-project")),
167 PathBuf::from(path!("/root")),
168 ]
169 );
170}
171
172#[gpui::test]
173async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
174 cx: &mut gpui::TestAppContext,
175) {
176 init_test(cx);
177
178 let fs = FakeFs::new(cx.executor());
179 let project = Project::test(fs, [], cx).await;
180
181 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
182 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
183
184 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
185}
186
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user,
// and thus we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Opens a project through a symlinked root ("root_link" -> "root") and
    // verifies the worktree scans through symlinks. Uses the real filesystem
    // (not FakeFs) because actual symlink creation is exercised.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // "root_link" points at "root"; "root/finnochio" points at "root/fennel".
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, banana/carrot/date, banana/carrot/endive, fennel/grape, and
        // finnochio/grape (reached via the directory symlink) = 5 files.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory's entry resolves to the same inode as its
        // target, proving the link was followed rather than copied.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
237
238#[gpui::test]
239async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
240 init_test(cx);
241
242 let dir = TempTree::new(json!({
243 ".editorconfig": r#"
244 root = true
245 [*.rs]
246 indent_style = tab
247 indent_size = 3
248 end_of_line = lf
249 insert_final_newline = true
250 trim_trailing_whitespace = true
251 max_line_length = 120
252 [*.js]
253 tab_width = 10
254 max_line_length = off
255 "#,
256 ".zed": {
257 "settings.json": r#"{
258 "tab_size": 8,
259 "hard_tabs": false,
260 "ensure_final_newline_on_save": false,
261 "remove_trailing_whitespace_on_save": false,
262 "preferred_line_length": 64,
263 "soft_wrap": "editor_width",
264 }"#,
265 },
266 "a.rs": "fn a() {\n A\n}",
267 "b": {
268 ".editorconfig": r#"
269 [*.rs]
270 indent_size = 2
271 max_line_length = off,
272 "#,
273 "b.rs": "fn b() {\n B\n}",
274 },
275 "c.js": "def c\n C\nend",
276 "d": {
277 ".editorconfig": r#"
278 [*.rs]
279 indent_size = 1
280 "#,
281 "d.rs": "fn d() {\n D\n}",
282 },
283 "README.json": "tabs are better\n",
284 }));
285
286 let path = dir.path();
287 let fs = FakeFs::new(cx.executor());
288 fs.insert_tree_from_real_fs(path, path).await;
289 let project = Project::test(fs, [path], cx).await;
290
291 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
292 language_registry.add(js_lang());
293 language_registry.add(json_lang());
294 language_registry.add(rust_lang());
295
296 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
297
298 cx.executor().run_until_parked();
299
300 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
301 let buffer = project
302 .update(cx, |project, cx| {
303 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
304 })
305 .await
306 .unwrap();
307 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
308 };
309
310 let settings_a = settings_for("a.rs", cx).await;
311 let settings_b = settings_for("b/b.rs", cx).await;
312 let settings_c = settings_for("c.js", cx).await;
313 let settings_d = settings_for("d/d.rs", cx).await;
314 let settings_readme = settings_for("README.json", cx).await;
315 // .editorconfig overrides .zed/settings
316 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
317 assert_eq!(settings_a.hard_tabs, true);
318 assert_eq!(settings_a.ensure_final_newline_on_save, true);
319 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
320 assert_eq!(settings_a.preferred_line_length, 120);
321
322 // .editorconfig in b/ overrides .editorconfig in root
323 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
324
325 // .editorconfig in subdirectory overrides .editorconfig in root
326 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
327
328 // "indent_size" is not set, so "tab_width" is used
329 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
330
331 // When max_line_length is "off", default to .zed/settings.json
332 assert_eq!(settings_b.preferred_line_length, 64);
333 assert_eq!(settings_c.preferred_line_length, 64);
334
335 // README.md should not be affected by .editorconfig's globe "*.rs"
336 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
337}
338
339#[gpui::test]
340async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
341 init_test(cx);
342
343 let fs = FakeFs::new(cx.executor());
344 fs.insert_tree(
345 path!("/grandparent"),
346 json!({
347 ".editorconfig": "[*]\nindent_size = 4\n",
348 "parent": {
349 ".editorconfig": "[*.rs]\nindent_size = 2\n",
350 "worktree": {
351 ".editorconfig": "[*.md]\nindent_size = 3\n",
352 "main.rs": "fn main() {}",
353 "README.md": "# README",
354 "other.txt": "other content",
355 }
356 }
357 }),
358 )
359 .await;
360
361 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
362
363 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
364 language_registry.add(rust_lang());
365 language_registry.add(markdown_lang());
366
367 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
368
369 cx.executor().run_until_parked();
370 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
371 let buffer = project
372 .update(cx, |project, cx| {
373 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
374 })
375 .await
376 .unwrap();
377 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
378 };
379
380 let settings_rs = settings_for("main.rs", cx).await;
381 let settings_md = settings_for("README.md", cx).await;
382 let settings_txt = settings_for("other.txt", cx).await;
383
384 // main.rs gets indent_size = 2 from parent's external .editorconfig
385 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
386
387 // README.md gets indent_size = 3 from internal worktree .editorconfig
388 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
389
390 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
391 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
392}
393
394#[gpui::test]
395async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
396 init_test(cx);
397
398 let fs = FakeFs::new(cx.executor());
399 fs.insert_tree(
400 path!("/worktree"),
401 json!({
402 ".editorconfig": "[*]\nindent_size = 99\n",
403 "src": {
404 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
405 "file.rs": "fn main() {}",
406 }
407 }),
408 )
409 .await;
410
411 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
412
413 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
414 language_registry.add(rust_lang());
415
416 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
417
418 cx.executor().run_until_parked();
419
420 let buffer = project
421 .update(cx, |project, cx| {
422 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
423 })
424 .await
425 .unwrap();
426 cx.update(|cx| {
427 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
428 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
429 });
430}
431
432#[gpui::test]
433async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
434 init_test(cx);
435
436 let fs = FakeFs::new(cx.executor());
437 fs.insert_tree(
438 path!("/parent"),
439 json!({
440 ".editorconfig": "[*]\nindent_size = 99\n",
441 "worktree": {
442 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
443 "file.rs": "fn main() {}",
444 }
445 }),
446 )
447 .await;
448
449 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
450
451 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
452 language_registry.add(rust_lang());
453
454 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
455
456 cx.executor().run_until_parked();
457
458 let buffer = project
459 .update(cx, |project, cx| {
460 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
461 })
462 .await
463 .unwrap();
464
465 cx.update(|cx| {
466 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
467
468 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
469 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
470 });
471}
472
473#[gpui::test]
474async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
475 init_test(cx);
476
477 let fs = FakeFs::new(cx.executor());
478 fs.insert_tree(
479 path!("/grandparent"),
480 json!({
481 ".editorconfig": "[*]\nindent_size = 99\n",
482 "parent": {
483 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
484 "worktree": {
485 "file.rs": "fn main() {}",
486 }
487 }
488 }),
489 )
490 .await;
491
492 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
493
494 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
495 language_registry.add(rust_lang());
496
497 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
498
499 cx.executor().run_until_parked();
500
501 let buffer = project
502 .update(cx, |project, cx| {
503 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
504 })
505 .await
506 .unwrap();
507
508 cx.update(|cx| {
509 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
510
511 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
512 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
513 });
514}
515
516#[gpui::test]
517async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
518 init_test(cx);
519
520 let fs = FakeFs::new(cx.executor());
521 fs.insert_tree(
522 path!("/parent"),
523 json!({
524 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
525 "worktree_a": {
526 "file.rs": "fn a() {}",
527 ".editorconfig": "[*]\ninsert_final_newline = true\n",
528 },
529 "worktree_b": {
530 "file.rs": "fn b() {}",
531 ".editorconfig": "[*]\ninsert_final_newline = false\n",
532 }
533 }),
534 )
535 .await;
536
537 let project = Project::test(
538 fs,
539 [
540 path!("/parent/worktree_a").as_ref(),
541 path!("/parent/worktree_b").as_ref(),
542 ],
543 cx,
544 )
545 .await;
546
547 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
548 language_registry.add(rust_lang());
549
550 cx.executor().run_until_parked();
551
552 let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
553 assert_eq!(worktrees.len(), 2);
554
555 for worktree in worktrees {
556 let buffer = project
557 .update(cx, |project, cx| {
558 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
559 })
560 .await
561 .unwrap();
562
563 cx.update(|cx| {
564 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
565
566 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
567 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
568 });
569 }
570}
571
572#[gpui::test]
573async fn test_external_editorconfig_not_loaded_without_internal_config(
574 cx: &mut gpui::TestAppContext,
575) {
576 init_test(cx);
577
578 let fs = FakeFs::new(cx.executor());
579 fs.insert_tree(
580 path!("/parent"),
581 json!({
582 ".editorconfig": "[*]\nindent_size = 99\n",
583 "worktree": {
584 "file.rs": "fn main() {}",
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
591
592 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
593 language_registry.add(rust_lang());
594
595 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
596
597 cx.executor().run_until_parked();
598
599 let buffer = project
600 .update(cx, |project, cx| {
601 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
602 })
603 .await
604 .unwrap();
605
606 cx.update(|cx| {
607 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
608
609 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
610 // because without an internal .editorconfig, external configs are not loaded
611 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
612 });
613}
614
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Editing an external (outside-the-worktree) .editorconfig on disk should
    // be noticed and refresh the settings of already-open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick it up after
    // the executor settles.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
678
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // Adding a second worktree to an existing project should trigger discovery
    // of external .editorconfig files above the new worktree root, so it
    // inherits the same shared parent config as the existing worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree after the project already exists.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
745
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Removing the only worktree that references an external .editorconfig
    // should drop the worktree's state, the cached external config, and its
    // file watcher from the global SettingsStore.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
801
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // When two worktrees share one external .editorconfig, removing one
    // worktree must not evict the shared config: the remaining worktree still
    // depends on it and should keep resolving settings from it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
894
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // A project-level `git_hosting_providers` setting should register a custom
    // provider in the global registry, and clearing the setting should
    // unregister it again.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The custom "foo" provider from project settings should now be listed.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk; the provider should be removed.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
959
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Exercises per-directory `.zed/settings.json` and `.zed/tasks.json`:
    // nested settings override worktree-root settings, tasks from nested and
    // root `.zed` directories are both listed, and after a task is scheduled
    // and a global tasks.json is installed, task ordering places the
    // most-recently-used task first.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree's (default) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // `b/.zed/settings.json` overrides the worktree-root tab_size.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    // Initially both local tasks are listed (nested directory's task first).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and install a global tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The just-scheduled task is now first, followed by the other local task,
    // then the newly-added global task (with its env vars resolved).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1162
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    // An invalid task template in a worktree-local `.zed/tasks.json` (here:
    // one referencing the unknown variable `$ZED_FOO`) should surface an
    // `Event::Toast` whose link points at the tasks documentation.
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we have a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped to `true` by the subscription below once the expected toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1220
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A worktree-local task referencing `$ZED_WORKTREE_ROOT` should only
    // resolve when some context supplies that variable: with an active-item
    // context alone it is skipped, and with an active worktree context the
    // variable is substituted into the command.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // No worktree context is provided, so `$ZED_WORKTREE_ROOT` cannot be
    // substituted and the task must not resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Now supply a worktree context that defines `WorktreeRoot`; the task
    // should resolve with the variable substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1312
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects live in one worktree, each rooted by its own
    // `pyproject.toml`. With no toolchain selected both should share a single
    // language-server instance; activating a distinct toolchain for one
    // subproject should spawn a second, separate server for it.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up from `path` (at most `depth` ancestors) and return the first
        // directory that directly contains a `pyproject.toml`.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a Python buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet, so none should be reported active.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b; this should split the
    // server, giving project-b its own instance.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1514
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer is opened, buffers are configured from
    // server capabilities, edits/saves/renames/closes are routed only to the
    // servers matching each buffer's language, and restarting servers reopens
    // the relevant documents in the fresh instances.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Rust server advertises `.`/`::` completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // JSON server advertises a `:` completion trigger and save notifications.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it must not pick up any triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename shows up as close-then-open on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic from the Rust server so we can later assert that it
    // is cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers should receive a shutdown request before the restart.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (set comparison: the two open notifications may arrive in either order).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1917
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    // A relative language-server binary path from the settings resolves
    // against the worktree root when a matching file exists there; otherwise
    // the bare path is kept so lookup can fall back to the `PATH` env var.
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    // NOTE(review): the tree below contains `my_fake_lsp.exe` while the
    // settings reference `my_fake_lsp_binary.exe` — confirm which existence
    // check drives relative-path resolution (the assertion below still expects
    // the settings path resolved under the worktree root).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is resolved to an absolute path under the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-existent path is left untouched for PATH-based lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2001
2002#[gpui::test]
2003async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2004 init_test(cx);
2005
2006 let settings_json_contents = json!({
2007 "languages": {
2008 "Rust": {
2009 "language_servers": ["tilde_lsp"]
2010 }
2011 },
2012 "lsp": {
2013 "tilde_lsp": {
2014 "binary": {
2015 "path": "~/.local/bin/rust-analyzer",
2016 }
2017 }
2018 },
2019 });
2020
2021 let fs = FakeFs::new(cx.executor());
2022 fs.insert_tree(
2023 path!("/root"),
2024 json!({
2025 ".zed": {
2026 "settings.json": settings_json_contents.to_string(),
2027 },
2028 "src": {
2029 "main.rs": "fn main() {}",
2030 }
2031 }),
2032 )
2033 .await;
2034
2035 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2036 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2037 language_registry.add(rust_lang());
2038
2039 let mut tilde_lsp = language_registry.register_fake_lsp(
2040 "Rust",
2041 FakeLspAdapter {
2042 name: "tilde_lsp",
2043 ..Default::default()
2044 },
2045 );
2046 cx.run_until_parked();
2047
2048 project
2049 .update(cx, |project, cx| {
2050 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2051 })
2052 .await
2053 .unwrap();
2054
2055 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2056 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2057 assert_eq!(
2058 lsp_path, expected_path,
2059 "Tilde path should expand to home directory"
2060 );
2061}
2062
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    // A filesystem `Rescan` event for a path the server watches via
    // `workspace/didChangeWatchedFiles` must be forwarded to the server as a
    // plain `CHANGED` file event rather than being dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake language server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Register a watcher for `Cargo.lock` and record every change the server
    // is notified about.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    cx.executor().run_until_parked();
    // Nothing should have been reported before we emit an event ourselves.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces as a single CHANGED event for the watched path.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2153
2154#[gpui::test]
2155async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2156 init_test(cx);
2157
2158 let fs = FakeFs::new(cx.executor());
2159 fs.insert_tree(
2160 path!("/the-root"),
2161 json!({
2162 ".gitignore": "target\n",
2163 "Cargo.lock": "",
2164 "src": {
2165 "a.rs": "",
2166 "b.rs": "",
2167 },
2168 "target": {
2169 "x": {
2170 "out": {
2171 "x.rs": ""
2172 }
2173 },
2174 "y": {
2175 "out": {
2176 "y.rs": "",
2177 }
2178 },
2179 "z": {
2180 "out": {
2181 "z.rs": ""
2182 }
2183 }
2184 }
2185 }),
2186 )
2187 .await;
2188 fs.insert_tree(
2189 path!("/the-registry"),
2190 json!({
2191 "dep1": {
2192 "src": {
2193 "dep1.rs": "",
2194 }
2195 },
2196 "dep2": {
2197 "src": {
2198 "dep2.rs": "",
2199 }
2200 },
2201 }),
2202 )
2203 .await;
2204 fs.insert_tree(
2205 path!("/the/stdlib"),
2206 json!({
2207 "LICENSE": "",
2208 "src": {
2209 "string.rs": "",
2210 }
2211 }),
2212 )
2213 .await;
2214
2215 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2216 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2217 (project.languages().clone(), project.lsp_store())
2218 });
2219 language_registry.add(rust_lang());
2220 let mut fake_servers = language_registry.register_fake_lsp(
2221 "Rust",
2222 FakeLspAdapter {
2223 name: "the-language-server",
2224 ..Default::default()
2225 },
2226 );
2227
2228 cx.executor().run_until_parked();
2229
2230 // Start the language server by opening a buffer with a compatible file extension.
2231 project
2232 .update(cx, |project, cx| {
2233 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2234 })
2235 .await
2236 .unwrap();
2237
2238 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2239 project.update(cx, |project, cx| {
2240 let worktree = project.worktrees(cx).next().unwrap();
2241 assert_eq!(
2242 worktree
2243 .read(cx)
2244 .snapshot()
2245 .entries(true, 0)
2246 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2247 .collect::<Vec<_>>(),
2248 &[
2249 ("", false),
2250 (".gitignore", false),
2251 ("Cargo.lock", false),
2252 ("src", false),
2253 ("src/a.rs", false),
2254 ("src/b.rs", false),
2255 ("target", true),
2256 ]
2257 );
2258 });
2259
2260 let prev_read_dir_count = fs.read_dir_call_count();
2261
2262 let fake_server = fake_servers.next().await.unwrap();
2263 cx.executor().run_until_parked();
2264 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2265 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2266 id
2267 });
2268
2269 // Simulate jumping to a definition in a dependency outside of the worktree.
2270 let _out_of_worktree_buffer = project
2271 .update(cx, |project, cx| {
2272 project.open_local_buffer_via_lsp(
2273 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2274 server_id,
2275 cx,
2276 )
2277 })
2278 .await
2279 .unwrap();
2280
2281 // Keep track of the FS events reported to the language server.
2282 let file_changes = Arc::new(Mutex::new(Vec::new()));
2283 fake_server
2284 .request::<lsp::request::RegisterCapability>(
2285 lsp::RegistrationParams {
2286 registrations: vec![lsp::Registration {
2287 id: Default::default(),
2288 method: "workspace/didChangeWatchedFiles".to_string(),
2289 register_options: serde_json::to_value(
2290 lsp::DidChangeWatchedFilesRegistrationOptions {
2291 watchers: vec![
2292 lsp::FileSystemWatcher {
2293 glob_pattern: lsp::GlobPattern::String(
2294 path!("/the-root/Cargo.toml").to_string(),
2295 ),
2296 kind: None,
2297 },
2298 lsp::FileSystemWatcher {
2299 glob_pattern: lsp::GlobPattern::String(
2300 path!("/the-root/src/*.{rs,c}").to_string(),
2301 ),
2302 kind: None,
2303 },
2304 lsp::FileSystemWatcher {
2305 glob_pattern: lsp::GlobPattern::String(
2306 path!("/the-root/target/y/**/*.rs").to_string(),
2307 ),
2308 kind: None,
2309 },
2310 lsp::FileSystemWatcher {
2311 glob_pattern: lsp::GlobPattern::String(
2312 path!("/the/stdlib/src/**/*.rs").to_string(),
2313 ),
2314 kind: None,
2315 },
2316 lsp::FileSystemWatcher {
2317 glob_pattern: lsp::GlobPattern::String(
2318 path!("**/Cargo.lock").to_string(),
2319 ),
2320 kind: None,
2321 },
2322 ],
2323 },
2324 )
2325 .ok(),
2326 }],
2327 },
2328 DEFAULT_LSP_REQUEST_TIMEOUT,
2329 )
2330 .await
2331 .into_response()
2332 .unwrap();
2333 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2334 let file_changes = file_changes.clone();
2335 move |params, _| {
2336 let mut file_changes = file_changes.lock();
2337 file_changes.extend(params.changes);
2338 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2339 }
2340 });
2341
2342 cx.executor().run_until_parked();
2343 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2344 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2345
2346 let mut new_watched_paths = fs.watched_paths();
2347 new_watched_paths.retain(|path| {
2348 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2349 });
2350 assert_eq!(
2351 &new_watched_paths,
2352 &[
2353 Path::new(path!("/the-root")),
2354 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2355 Path::new(path!("/the/stdlib/src"))
2356 ]
2357 );
2358
2359 // Now the language server has asked us to watch an ignored directory path,
2360 // so we recursively load it.
2361 project.update(cx, |project, cx| {
2362 let worktree = project.visible_worktrees(cx).next().unwrap();
2363 assert_eq!(
2364 worktree
2365 .read(cx)
2366 .snapshot()
2367 .entries(true, 0)
2368 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2369 .collect::<Vec<_>>(),
2370 &[
2371 ("", false),
2372 (".gitignore", false),
2373 ("Cargo.lock", false),
2374 ("src", false),
2375 ("src/a.rs", false),
2376 ("src/b.rs", false),
2377 ("target", true),
2378 ("target/x", true),
2379 ("target/y", true),
2380 ("target/y/out", true),
2381 ("target/y/out/y.rs", true),
2382 ("target/z", true),
2383 ]
2384 );
2385 });
2386
2387 // Perform some file system mutations, two of which match the watched patterns,
2388 // and one of which does not.
2389 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2390 .await
2391 .unwrap();
2392 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2393 .await
2394 .unwrap();
2395 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2396 .await
2397 .unwrap();
2398 fs.create_file(
2399 path!("/the-root/target/x/out/x2.rs").as_ref(),
2400 Default::default(),
2401 )
2402 .await
2403 .unwrap();
2404 fs.create_file(
2405 path!("/the-root/target/y/out/y2.rs").as_ref(),
2406 Default::default(),
2407 )
2408 .await
2409 .unwrap();
2410 fs.save(
2411 path!("/the-root/Cargo.lock").as_ref(),
2412 &"".into(),
2413 Default::default(),
2414 )
2415 .await
2416 .unwrap();
2417 fs.save(
2418 path!("/the-stdlib/LICENSE").as_ref(),
2419 &"".into(),
2420 Default::default(),
2421 )
2422 .await
2423 .unwrap();
2424 fs.save(
2425 path!("/the/stdlib/src/string.rs").as_ref(),
2426 &"".into(),
2427 Default::default(),
2428 )
2429 .await
2430 .unwrap();
2431
2432 // The language server receives events for the FS mutations that match its watch patterns.
2433 cx.executor().run_until_parked();
2434 assert_eq!(
2435 &*file_changes.lock(),
2436 &[
2437 lsp::FileEvent {
2438 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2439 typ: lsp::FileChangeType::CHANGED,
2440 },
2441 lsp::FileEvent {
2442 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2443 typ: lsp::FileChangeType::DELETED,
2444 },
2445 lsp::FileEvent {
2446 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2447 typ: lsp::FileChangeType::CREATED,
2448 },
2449 lsp::FileEvent {
2450 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2451 typ: lsp::FileChangeType::CREATED,
2452 },
2453 lsp::FileEvent {
2454 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2455 typ: lsp::FileChangeType::CHANGED,
2456 },
2457 ]
2458 );
2459}
2460
2461#[gpui::test]
2462async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2463 init_test(cx);
2464
2465 let fs = FakeFs::new(cx.executor());
2466 fs.insert_tree(
2467 path!("/dir"),
2468 json!({
2469 "a.rs": "let a = 1;",
2470 "b.rs": "let b = 2;"
2471 }),
2472 )
2473 .await;
2474
2475 let project = Project::test(
2476 fs,
2477 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2478 cx,
2479 )
2480 .await;
2481 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2482
2483 let buffer_a = project
2484 .update(cx, |project, cx| {
2485 project.open_local_buffer(path!("/dir/a.rs"), cx)
2486 })
2487 .await
2488 .unwrap();
2489 let buffer_b = project
2490 .update(cx, |project, cx| {
2491 project.open_local_buffer(path!("/dir/b.rs"), cx)
2492 })
2493 .await
2494 .unwrap();
2495
2496 lsp_store.update(cx, |lsp_store, cx| {
2497 lsp_store
2498 .update_diagnostics(
2499 LanguageServerId(0),
2500 lsp::PublishDiagnosticsParams {
2501 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2502 version: None,
2503 diagnostics: vec![lsp::Diagnostic {
2504 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2505 severity: Some(lsp::DiagnosticSeverity::ERROR),
2506 message: "error 1".to_string(),
2507 ..Default::default()
2508 }],
2509 },
2510 None,
2511 DiagnosticSourceKind::Pushed,
2512 &[],
2513 cx,
2514 )
2515 .unwrap();
2516 lsp_store
2517 .update_diagnostics(
2518 LanguageServerId(0),
2519 lsp::PublishDiagnosticsParams {
2520 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2521 version: None,
2522 diagnostics: vec![lsp::Diagnostic {
2523 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2524 severity: Some(DiagnosticSeverity::WARNING),
2525 message: "error 2".to_string(),
2526 ..Default::default()
2527 }],
2528 },
2529 None,
2530 DiagnosticSourceKind::Pushed,
2531 &[],
2532 cx,
2533 )
2534 .unwrap();
2535 });
2536
2537 buffer_a.update(cx, |buffer, _| {
2538 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2539 assert_eq!(
2540 chunks
2541 .iter()
2542 .map(|(s, d)| (s.as_str(), *d))
2543 .collect::<Vec<_>>(),
2544 &[
2545 ("let ", None),
2546 ("a", Some(DiagnosticSeverity::ERROR)),
2547 (" = 1;", None),
2548 ]
2549 );
2550 });
2551 buffer_b.update(cx, |buffer, _| {
2552 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2553 assert_eq!(
2554 chunks
2555 .iter()
2556 .map(|(s, d)| (s.as_str(), *d))
2557 .collect::<Vec<_>>(),
2558 &[
2559 ("let ", None),
2560 ("b", Some(DiagnosticSeverity::WARNING)),
2561 (" = 2;", None),
2562 ]
2563 );
2564 });
2565}
2566
2567#[gpui::test]
2568async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2569 init_test(cx);
2570
2571 let fs = FakeFs::new(cx.executor());
2572 fs.insert_tree(
2573 path!("/root"),
2574 json!({
2575 "dir": {
2576 ".git": {
2577 "HEAD": "ref: refs/heads/main",
2578 },
2579 ".gitignore": "b.rs",
2580 "a.rs": "let a = 1;",
2581 "b.rs": "let b = 2;",
2582 },
2583 "other.rs": "let b = c;"
2584 }),
2585 )
2586 .await;
2587
2588 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2589 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2590 let (worktree, _) = project
2591 .update(cx, |project, cx| {
2592 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2593 })
2594 .await
2595 .unwrap();
2596 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2597
2598 let (worktree, _) = project
2599 .update(cx, |project, cx| {
2600 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2601 })
2602 .await
2603 .unwrap();
2604 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2605
2606 let server_id = LanguageServerId(0);
2607 lsp_store.update(cx, |lsp_store, cx| {
2608 lsp_store
2609 .update_diagnostics(
2610 server_id,
2611 lsp::PublishDiagnosticsParams {
2612 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2613 version: None,
2614 diagnostics: vec![lsp::Diagnostic {
2615 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2616 severity: Some(lsp::DiagnosticSeverity::ERROR),
2617 message: "unused variable 'b'".to_string(),
2618 ..Default::default()
2619 }],
2620 },
2621 None,
2622 DiagnosticSourceKind::Pushed,
2623 &[],
2624 cx,
2625 )
2626 .unwrap();
2627 lsp_store
2628 .update_diagnostics(
2629 server_id,
2630 lsp::PublishDiagnosticsParams {
2631 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2632 version: None,
2633 diagnostics: vec![lsp::Diagnostic {
2634 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2635 severity: Some(lsp::DiagnosticSeverity::ERROR),
2636 message: "unknown variable 'c'".to_string(),
2637 ..Default::default()
2638 }],
2639 },
2640 None,
2641 DiagnosticSourceKind::Pushed,
2642 &[],
2643 cx,
2644 )
2645 .unwrap();
2646 });
2647
2648 let main_ignored_buffer = project
2649 .update(cx, |project, cx| {
2650 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2651 })
2652 .await
2653 .unwrap();
2654 main_ignored_buffer.update(cx, |buffer, _| {
2655 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2656 assert_eq!(
2657 chunks
2658 .iter()
2659 .map(|(s, d)| (s.as_str(), *d))
2660 .collect::<Vec<_>>(),
2661 &[
2662 ("let ", None),
2663 ("b", Some(DiagnosticSeverity::ERROR)),
2664 (" = 2;", None),
2665 ],
2666 "Gigitnored buffers should still get in-buffer diagnostics",
2667 );
2668 });
2669 let other_buffer = project
2670 .update(cx, |project, cx| {
2671 project.open_buffer((other_worktree_id, rel_path("")), cx)
2672 })
2673 .await
2674 .unwrap();
2675 other_buffer.update(cx, |buffer, _| {
2676 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2677 assert_eq!(
2678 chunks
2679 .iter()
2680 .map(|(s, d)| (s.as_str(), *d))
2681 .collect::<Vec<_>>(),
2682 &[
2683 ("let b = ", None),
2684 ("c", Some(DiagnosticSeverity::ERROR)),
2685 (";", None),
2686 ],
2687 "Buffers from hidden projects should still get in-buffer diagnostics"
2688 );
2689 });
2690
2691 project.update(cx, |project, cx| {
2692 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2693 assert_eq!(
2694 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2695 vec![(
2696 ProjectPath {
2697 worktree_id: main_worktree_id,
2698 path: rel_path("b.rs").into(),
2699 },
2700 server_id,
2701 DiagnosticSummary {
2702 error_count: 1,
2703 warning_count: 0,
2704 }
2705 )]
2706 );
2707 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2708 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2709 });
2710}
2711
// Verifies the event sequence emitted while a language server runs a
// disk-based diagnostics pass: Started -> DiagnosticsUpdated -> Finished,
// keyed off the adapter's `disk_based_diagnostics_progress_token`, and that
// re-publishing identical empty diagnostics does not emit a second update.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Progress notifications using `progress_token` are treated as a
    // disk-based diagnostics pass for this fake server.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events *before* driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the disk-based token emits a Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-pass emits a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress with the disk-based token emits a Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer at the right range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // A second identical (empty) publication must produce no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2847
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in progress does not leave the project stuck in a
// "diagnostics running" state: the old server's unfinished pass is discarded,
// and only the new server's progress is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, then the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer is re-registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2949
2950#[gpui::test]
2951async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2952 init_test(cx);
2953
2954 let fs = FakeFs::new(cx.executor());
2955 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2956
2957 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2958
2959 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2960 language_registry.add(rust_lang());
2961 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2962
2963 let (buffer, _) = project
2964 .update(cx, |project, cx| {
2965 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2966 })
2967 .await
2968 .unwrap();
2969
2970 // Publish diagnostics
2971 let fake_server = fake_servers.next().await.unwrap();
2972 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2973 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2974 version: None,
2975 diagnostics: vec![lsp::Diagnostic {
2976 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2977 severity: Some(lsp::DiagnosticSeverity::ERROR),
2978 message: "the message".to_string(),
2979 ..Default::default()
2980 }],
2981 });
2982
2983 cx.executor().run_until_parked();
2984 buffer.update(cx, |buffer, _| {
2985 assert_eq!(
2986 buffer
2987 .snapshot()
2988 .diagnostics_in_range::<_, usize>(0..1, false)
2989 .map(|entry| entry.diagnostic.message.clone())
2990 .collect::<Vec<_>>(),
2991 ["the message".to_string()]
2992 );
2993 });
2994 project.update(cx, |project, cx| {
2995 assert_eq!(
2996 project.diagnostic_summary(false, cx),
2997 DiagnosticSummary {
2998 error_count: 1,
2999 warning_count: 0,
3000 }
3001 );
3002 });
3003
3004 project.update(cx, |project, cx| {
3005 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3006 });
3007
3008 // The diagnostics are cleared.
3009 cx.executor().run_until_parked();
3010 buffer.update(cx, |buffer, _| {
3011 assert_eq!(
3012 buffer
3013 .snapshot()
3014 .diagnostics_in_range::<_, usize>(0..1, false)
3015 .map(|entry| entry.diagnostic.message.clone())
3016 .collect::<Vec<_>>(),
3017 Vec::<String>::new(),
3018 );
3019 });
3020 project.update(cx, |project, cx| {
3021 assert_eq!(
3022 project.diagnostic_summary(false, cx),
3023 DiagnosticSummary {
3024 error_count: 0,
3025 warning_count: 0,
3026 }
3027 );
3028 });
3029}
3030
3031#[gpui::test]
3032async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3033 init_test(cx);
3034
3035 let fs = FakeFs::new(cx.executor());
3036 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3037
3038 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3039 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3040
3041 language_registry.add(rust_lang());
3042 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3043
3044 let (buffer, _handle) = project
3045 .update(cx, |project, cx| {
3046 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3047 })
3048 .await
3049 .unwrap();
3050
3051 // Before restarting the server, report diagnostics with an unknown buffer version.
3052 let fake_server = fake_servers.next().await.unwrap();
3053 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3054 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3055 version: Some(10000),
3056 diagnostics: Vec::new(),
3057 });
3058 cx.executor().run_until_parked();
3059 project.update(cx, |project, cx| {
3060 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3061 });
3062
3063 let mut fake_server = fake_servers.next().await.unwrap();
3064 let notification = fake_server
3065 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3066 .await
3067 .text_document;
3068 assert_eq!(notification.version, 0);
3069}
3070
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification only for progress tokens
// that were begun with `cancellable: Some(true)`.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First task is explicitly non-cancellable; it must NOT receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second task is cancellable; it is the one expected to be cancelled below.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token's work is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3142
// Verifies that toggling the per-language `enable_language_server` setting
// starts/stops only the affected server: disabling Rust exits only the Rust
// server, and flipping the settings restarts Rust while exiting JavaScript.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so each can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3260
3261#[gpui::test(iterations = 3)]
3262async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3263 init_test(cx);
3264
3265 let text = "
3266 fn a() { A }
3267 fn b() { BB }
3268 fn c() { CCC }
3269 "
3270 .unindent();
3271
3272 let fs = FakeFs::new(cx.executor());
3273 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3274
3275 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3276 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3277
3278 language_registry.add(rust_lang());
3279 let mut fake_servers = language_registry.register_fake_lsp(
3280 "Rust",
3281 FakeLspAdapter {
3282 disk_based_diagnostics_sources: vec!["disk".into()],
3283 ..Default::default()
3284 },
3285 );
3286
3287 let buffer = project
3288 .update(cx, |project, cx| {
3289 project.open_local_buffer(path!("/dir/a.rs"), cx)
3290 })
3291 .await
3292 .unwrap();
3293
3294 let _handle = project.update(cx, |project, cx| {
3295 project.register_buffer_with_language_servers(&buffer, cx)
3296 });
3297
3298 let mut fake_server = fake_servers.next().await.unwrap();
3299 let open_notification = fake_server
3300 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3301 .await;
3302
3303 // Edit the buffer, moving the content down
3304 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3305 let change_notification_1 = fake_server
3306 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3307 .await;
3308 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3309
3310 // Report some diagnostics for the initial version of the buffer
3311 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3312 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3313 version: Some(open_notification.text_document.version),
3314 diagnostics: vec![
3315 lsp::Diagnostic {
3316 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3317 severity: Some(DiagnosticSeverity::ERROR),
3318 message: "undefined variable 'A'".to_string(),
3319 source: Some("disk".to_string()),
3320 ..Default::default()
3321 },
3322 lsp::Diagnostic {
3323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3324 severity: Some(DiagnosticSeverity::ERROR),
3325 message: "undefined variable 'BB'".to_string(),
3326 source: Some("disk".to_string()),
3327 ..Default::default()
3328 },
3329 lsp::Diagnostic {
3330 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3331 severity: Some(DiagnosticSeverity::ERROR),
3332 source: Some("disk".to_string()),
3333 message: "undefined variable 'CCC'".to_string(),
3334 ..Default::default()
3335 },
3336 ],
3337 });
3338
3339 // The diagnostics have moved down since they were created.
3340 cx.executor().run_until_parked();
3341 buffer.update(cx, |buffer, _| {
3342 assert_eq!(
3343 buffer
3344 .snapshot()
3345 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3346 .collect::<Vec<_>>(),
3347 &[
3348 DiagnosticEntry {
3349 range: Point::new(3, 9)..Point::new(3, 11),
3350 diagnostic: Diagnostic {
3351 source: Some("disk".into()),
3352 severity: DiagnosticSeverity::ERROR,
3353 message: "undefined variable 'BB'".to_string(),
3354 is_disk_based: true,
3355 group_id: 1,
3356 is_primary: true,
3357 source_kind: DiagnosticSourceKind::Pushed,
3358 ..Diagnostic::default()
3359 },
3360 },
3361 DiagnosticEntry {
3362 range: Point::new(4, 9)..Point::new(4, 12),
3363 diagnostic: Diagnostic {
3364 source: Some("disk".into()),
3365 severity: DiagnosticSeverity::ERROR,
3366 message: "undefined variable 'CCC'".to_string(),
3367 is_disk_based: true,
3368 group_id: 2,
3369 is_primary: true,
3370 source_kind: DiagnosticSourceKind::Pushed,
3371 ..Diagnostic::default()
3372 }
3373 }
3374 ]
3375 );
3376 assert_eq!(
3377 chunks_with_diagnostics(buffer, 0..buffer.len()),
3378 [
3379 ("\n\nfn a() { ".to_string(), None),
3380 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3381 (" }\nfn b() { ".to_string(), None),
3382 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3383 (" }\nfn c() { ".to_string(), None),
3384 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3385 (" }\n".to_string(), None),
3386 ]
3387 );
3388 assert_eq!(
3389 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3390 [
3391 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3392 (" }\nfn c() { ".to_string(), None),
3393 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3394 ]
3395 );
3396 });
3397
3398 // Ensure overlapping diagnostics are highlighted correctly.
3399 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3400 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3401 version: Some(open_notification.text_document.version),
3402 diagnostics: vec![
3403 lsp::Diagnostic {
3404 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3405 severity: Some(DiagnosticSeverity::ERROR),
3406 message: "undefined variable 'A'".to_string(),
3407 source: Some("disk".to_string()),
3408 ..Default::default()
3409 },
3410 lsp::Diagnostic {
3411 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3412 severity: Some(DiagnosticSeverity::WARNING),
3413 message: "unreachable statement".to_string(),
3414 source: Some("disk".to_string()),
3415 ..Default::default()
3416 },
3417 ],
3418 });
3419
3420 cx.executor().run_until_parked();
3421 buffer.update(cx, |buffer, _| {
3422 assert_eq!(
3423 buffer
3424 .snapshot()
3425 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3426 .collect::<Vec<_>>(),
3427 &[
3428 DiagnosticEntry {
3429 range: Point::new(2, 9)..Point::new(2, 12),
3430 diagnostic: Diagnostic {
3431 source: Some("disk".into()),
3432 severity: DiagnosticSeverity::WARNING,
3433 message: "unreachable statement".to_string(),
3434 is_disk_based: true,
3435 group_id: 4,
3436 is_primary: true,
3437 source_kind: DiagnosticSourceKind::Pushed,
3438 ..Diagnostic::default()
3439 }
3440 },
3441 DiagnosticEntry {
3442 range: Point::new(2, 9)..Point::new(2, 10),
3443 diagnostic: Diagnostic {
3444 source: Some("disk".into()),
3445 severity: DiagnosticSeverity::ERROR,
3446 message: "undefined variable 'A'".to_string(),
3447 is_disk_based: true,
3448 group_id: 3,
3449 is_primary: true,
3450 source_kind: DiagnosticSourceKind::Pushed,
3451 ..Diagnostic::default()
3452 },
3453 }
3454 ]
3455 );
3456 assert_eq!(
3457 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3458 [
3459 ("fn a() { ".to_string(), None),
3460 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3461 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3462 ("\n".to_string(), None),
3463 ]
3464 );
3465 assert_eq!(
3466 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3467 [
3468 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3469 ("\n".to_string(), None),
3470 ]
3471 );
3472 });
3473
3474 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3475 // changes since the last save.
3476 buffer.update(cx, |buffer, cx| {
3477 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3478 buffer.edit(
3479 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3480 None,
3481 cx,
3482 );
3483 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3484 });
3485 let change_notification_2 = fake_server
3486 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3487 .await;
3488 assert!(
3489 change_notification_2.text_document.version > change_notification_1.text_document.version
3490 );
3491
3492 // Handle out-of-order diagnostics
3493 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3494 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3495 version: Some(change_notification_2.text_document.version),
3496 diagnostics: vec![
3497 lsp::Diagnostic {
3498 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3499 severity: Some(DiagnosticSeverity::ERROR),
3500 message: "undefined variable 'BB'".to_string(),
3501 source: Some("disk".to_string()),
3502 ..Default::default()
3503 },
3504 lsp::Diagnostic {
3505 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3506 severity: Some(DiagnosticSeverity::WARNING),
3507 message: "undefined variable 'A'".to_string(),
3508 source: Some("disk".to_string()),
3509 ..Default::default()
3510 },
3511 ],
3512 });
3513
3514 cx.executor().run_until_parked();
3515 buffer.update(cx, |buffer, _| {
3516 assert_eq!(
3517 buffer
3518 .snapshot()
3519 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3520 .collect::<Vec<_>>(),
3521 &[
3522 DiagnosticEntry {
3523 range: Point::new(2, 21)..Point::new(2, 22),
3524 diagnostic: Diagnostic {
3525 source: Some("disk".into()),
3526 severity: DiagnosticSeverity::WARNING,
3527 message: "undefined variable 'A'".to_string(),
3528 is_disk_based: true,
3529 group_id: 6,
3530 is_primary: true,
3531 source_kind: DiagnosticSourceKind::Pushed,
3532 ..Diagnostic::default()
3533 }
3534 },
3535 DiagnosticEntry {
3536 range: Point::new(3, 9)..Point::new(3, 14),
3537 diagnostic: Diagnostic {
3538 source: Some("disk".into()),
3539 severity: DiagnosticSeverity::ERROR,
3540 message: "undefined variable 'BB'".to_string(),
3541 is_disk_based: true,
3542 group_id: 5,
3543 is_primary: true,
3544 source_kind: DiagnosticSourceKind::Pushed,
3545 ..Diagnostic::default()
3546 },
3547 }
3548 ]
3549 );
3550 });
3551}
3552
/// Verifies how zero-width (empty) diagnostic ranges are rendered when the
/// buffer is chunked with diagnostics enabled: an empty range is extended
/// forward to include the following character, and at the end of a line it is
/// extended backward to include the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store (no fake
    // language server involved): one mid-line on line 0, and one at the very
    // end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3630
/// Verifies that diagnostics reported for the same path by two distinct
/// language servers (ids 0 and 1) are counted independently in the
/// project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same file, same range — but two different server ids, so the
        // entries must not replace each other.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3691
/// Verifies that deleting a file from disk removes that file's diagnostics
/// from the project summary, while diagnostics for other files are retained.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
        .await;

    let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    // Seed one error in a.rs and one warning in b.rs, both from server id 0.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "error in a".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/b.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        is_primary: true,
                        message: "warning in b".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 1,
            }
        );
    });

    // Remove a.rs on disk and let the worktree observe the FS event.
    fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // a.rs's error is gone; b.rs's warning survives.
    lsp_store.update(cx, |lsp_store, cx| {
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 1,
            },
        );
    });
}
3769
/// Verifies that restarting a language server clears its previously published
/// diagnostics from the project summary, and that a `DiagnosticsUpdated`
/// project event is emitted as part of the stop/restart.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error from the (first) fake server instance.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Start observing project events *before* triggering the restart so we
    // can assert on what it emits.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all events that are already queued (now_or_never: non-blocking)
    // and check that at least one DiagnosticsUpdated was among them.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // After the restart the old server's diagnostics must be gone.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3843
/// Verifies that reloading a buffer (triggered by the file changing on disk)
/// causes a fresh `textDocument/diagnostic` pull from a server that supports
/// pull diagnostics. The pull handler counts invocations via an atomic.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Shared counter incremented each time the fake server handles a
    // DocumentDiagnosticRequest.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise pull-diagnostics support so the client issues
            // DocumentDiagnosticRequests at all.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            // Respond with an empty full report; only the call
                            // count matters for this test.
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3951
/// Verifies that `edits_from_lsp` correctly translates LSP edits that were
/// computed against an *older* document version: the buffer is edited after
/// the server "saw" it, and the returned edits must land at the translated
/// positions in the current buffer content.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server received on open; the LSP
    // edits below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Positions in these LSP edits refer to the *original* (pre-edit)
    // document; passing the stale version asks edits_from_lsp to translate
    // them forward through the buffer edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // edits while still achieving the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4106
/// Verifies that `edits_from_lsp` collapses a sprawling whole-file diff
/// (as rust-analyzer emits for merge-imports) into the minimal set of edits:
/// here, four LSP edits reduce to two buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff must be minimized to just these two edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4217
/// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair:
/// an insertion at the same position *after* a replacement. The insertion
/// must still end up before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4273
/// Verifies that `edits_from_lsp` sanitizes malformed server edits:
/// out-of-order edits, an inverted range (end before start), and a range
/// whose end line exceeds the document. The result must still minimize to
/// the same two edits as the well-formed merge-imports case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4380
4381fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4382 buffer: &Buffer,
4383 range: Range<T>,
4384) -> Vec<(String, Option<DiagnosticSeverity>)> {
4385 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4386 for chunk in buffer.snapshot().chunks(
4387 range,
4388 LanguageAwareStyling {
4389 tree_sitter: true,
4390 diagnostics: true,
4391 },
4392 ) {
4393 if chunks
4394 .last()
4395 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4396 {
4397 chunks.last_mut().unwrap().0.push_str(chunk.text);
4398 } else {
4399 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4400 }
4401 }
4402 chunks
4403}
4404
/// Verifies go-to-definition across files: the definition target opens
/// `a.rs` as an invisible worktree alongside the visible `b.rs` worktree,
/// no second language server is spawned, and dropping the definition handle
/// releases the invisible worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as the project root; a.rs is outside the project.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Respond to the definition request with a location inside a.rs,
    // verifying the request was issued for the expected position in b.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an *invisible* worktree (false); b.rs stays visible.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the last reference to the invisible
    // worktree, so only b.rs remains.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists (abs_path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4505
#[gpui::test]
// Verifies that when an LSP completion item carries an explicit `text_edit`,
// that edit's new text and range win over both `insert_text` and `label`.
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server advertising completion support; opening the buffer below
    // causes it to be spawned and yielded by this stream.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; it is serviced once the handler
    // installed below is polled via `.next().await`.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Single item whose text_edit replaces the trailing "fqn" (3 chars).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range, not the
    // label ("labelText") or insert_text ("insertText").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4589
#[gpui::test]
// Verifies handling of `CompletionList.itemDefaults.edit_range`: when an item
// has no `text_edit` of its own, the default range applies and the inserted
// text falls back to `text_edit_text`, then to `label`.
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server advertising completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request before installing the handler; `.next().await`
        // below drives the handler for exactly one request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn" (3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is inserted over the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // insert_text must be ignored when a default
                        // edit_range is in effect.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no text_edit_text, the label itself is inserted over the
        // default edit_range; insert_text ("irrelevant") is not used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4727
#[gpui::test]
// Verifies completion handling when the server supplies neither a per-item
// `text_edit` nor a default `edit_range`: the replace range must be inferred
// from the word adjacent to the cursor, and the inserted text falls back to
// `insert_text`, then to `label`.
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label, and the inferred range covers the
    // word before the cursor ("fqn", 3 chars).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is inserted over the word before the cursor ("cmp", 3 chars,
    // ending one character before the end of the line).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4834
#[gpui::test]
// Verifies that carriage returns in a completion's insert_text (both bare
// "\r" and "\r\n") are normalized to "\n" before the text reaches the buffer.
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on an in-memory filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request; the handler below services it when polled.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // insert_text mixes a bare "\r" and a "\r\n" sequence.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4903
4904#[gpui::test]
4905async fn test_supports_range_formatting_ignores_unrelated_language_servers(
4906 cx: &mut gpui::TestAppContext,
4907) {
4908 init_test(cx);
4909 cx.update(|cx| {
4910 SettingsStore::update_global(cx, |store, cx| {
4911 store.update_user_settings(cx, |settings| {
4912 settings.project.all_languages.defaults.formatter = Some(FormatterList::Single(
4913 Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current),
4914 ));
4915 });
4916 });
4917 });
4918
4919 let fs = FakeFs::new(cx.executor());
4920 fs.insert_tree(
4921 path!("/dir"),
4922 json!({
4923 "a.ts": "",
4924 "b.rs": "",
4925 }),
4926 )
4927 .await;
4928
4929 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4930 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4931 language_registry.add(typescript_lang());
4932 language_registry.add(rust_lang());
4933
4934 let mut typescript_language_servers = language_registry.register_fake_lsp(
4935 "TypeScript",
4936 FakeLspAdapter {
4937 name: "typescript-fake-language-server",
4938 capabilities: lsp::ServerCapabilities {
4939 document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
4940 ..lsp::ServerCapabilities::default()
4941 },
4942 ..FakeLspAdapter::default()
4943 },
4944 );
4945 let mut rust_language_servers = language_registry.register_fake_lsp(
4946 "Rust",
4947 FakeLspAdapter {
4948 name: "rust-fake-language-server",
4949 capabilities: lsp::ServerCapabilities {
4950 document_formatting_provider: Some(lsp::OneOf::Left(true)),
4951 document_range_formatting_provider: Some(lsp::OneOf::Left(false)),
4952 ..lsp::ServerCapabilities::default()
4953 },
4954 ..FakeLspAdapter::default()
4955 },
4956 );
4957
4958 let (typescript_buffer, _typescript_handle) = project
4959 .update(cx, |project, cx| {
4960 project.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4961 })
4962 .await
4963 .unwrap();
4964 let (rust_buffer, _rust_handle) = project
4965 .update(cx, |project, cx| {
4966 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
4967 })
4968 .await
4969 .unwrap();
4970
4971 let _typescript_language_server = typescript_language_servers.next().await.unwrap();
4972 let _rust_language_server = rust_language_servers.next().await.unwrap();
4973 cx.executor().run_until_parked();
4974
4975 assert!(project.read_with(cx, |project, cx| {
4976 project.supports_range_formatting(&typescript_buffer, cx)
4977 }));
4978 assert!(!project.read_with(cx, |project, cx| {
4979 project.supports_range_formatting(&rust_buffer, cx)
4980 }));
4981}
4982
#[gpui::test(iterations = 10)]
// Verifies the full command-backed code-action flow: resolve yields a command
// instead of edits, the command is executed, the server pushes a
// `workspace/applyEdit` back, and the resulting edits land in the returned
// project transaction (and are undoable).
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A single one-character TypeScript file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server with resolvable code actions and one executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` marks this action as needing resolution.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-backed) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit participates in undo history.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5126
5127#[gpui::test]
5128async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5129 init_test(cx);
5130 let fs = FakeFs::new(cx.background_executor.clone());
5131 let expected_contents = "content";
5132 fs.as_fake()
5133 .insert_tree(
5134 "/root",
5135 json!({
5136 "test.txt": expected_contents
5137 }),
5138 )
5139 .await;
5140
5141 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5142
5143 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5144 let worktree = project.worktrees(cx).next().unwrap();
5145 let entry_id = worktree
5146 .read(cx)
5147 .entry_for_path(rel_path("test.txt"))
5148 .unwrap()
5149 .id;
5150 (worktree, entry_id)
5151 });
5152 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5153 let _result = project
5154 .update(cx, |project, cx| {
5155 project.rename_entry(
5156 entry_id,
5157 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5158 cx,
5159 )
5160 })
5161 .await
5162 .unwrap();
5163 worktree.read_with(cx, |worktree, _| {
5164 assert!(
5165 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5166 "Old file should have been removed"
5167 );
5168 assert!(
5169 worktree
5170 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5171 .is_some(),
5172 "Whole directory hierarchy and the new file should have been created"
5173 );
5174 });
5175 assert_eq!(
5176 worktree
5177 .update(cx, |worktree, cx| {
5178 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5179 })
5180 .await
5181 .unwrap()
5182 .text,
5183 expected_contents,
5184 "Moved file's contents should be preserved"
5185 );
5186
5187 let entry_id = worktree.read_with(cx, |worktree, _| {
5188 worktree
5189 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5190 .unwrap()
5191 .id
5192 });
5193
5194 let _result = project
5195 .update(cx, |project, cx| {
5196 project.rename_entry(
5197 entry_id,
5198 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5199 cx,
5200 )
5201 })
5202 .await
5203 .unwrap();
5204 worktree.read_with(cx, |worktree, _| {
5205 assert!(
5206 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5207 "First file should not reappear"
5208 );
5209 assert!(
5210 worktree
5211 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5212 .is_none(),
5213 "Old file should have been removed"
5214 );
5215 assert!(
5216 worktree
5217 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5218 .is_some(),
5219 "No error should have occurred after moving into existing directory"
5220 );
5221 });
5222 assert_eq!(
5223 worktree
5224 .update(cx, |worktree, cx| {
5225 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5226 })
5227 .await
5228 .unwrap()
5229 .text,
5230 expected_contents,
5231 "Moved file's contents should be preserved"
5232 );
5233}
5234
5235#[gpui::test(iterations = 10)]
5236async fn test_save_file(cx: &mut gpui::TestAppContext) {
5237 init_test(cx);
5238
5239 let fs = FakeFs::new(cx.executor());
5240 fs.insert_tree(
5241 path!("/dir"),
5242 json!({
5243 "file1": "the old contents",
5244 }),
5245 )
5246 .await;
5247
5248 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5249 let buffer = project
5250 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5251 .await
5252 .unwrap();
5253 buffer.update(cx, |buffer, cx| {
5254 assert_eq!(buffer.text(), "the old contents");
5255 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5256 });
5257
5258 project
5259 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5260 .await
5261 .unwrap();
5262
5263 let new_text = fs
5264 .load(Path::new(path!("/dir/file1")))
5265 .await
5266 .unwrap()
5267 .replace("\r\n", "\n");
5268 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5269}
5270
#[gpui::test(iterations = 10)]
// Regression test: saving a previously untitled buffer under a path whose
// extension maps to a language must spawn that language's server and open the
// document in it.
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Start with an empty directory — no Rust files exist yet.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file and no language, registering it
    // with the language servers must be a no-op.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a `.rs` file, which assigns it the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is attached to the new Rust server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5350
5351#[gpui::test(iterations = 30)]
5352async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5353 init_test(cx);
5354
5355 let fs = FakeFs::new(cx.executor());
5356 fs.insert_tree(
5357 path!("/dir"),
5358 json!({
5359 "file1": "the original contents",
5360 }),
5361 )
5362 .await;
5363
5364 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5365 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5366 let buffer = project
5367 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5368 .await
5369 .unwrap();
5370
5371 // Change the buffer's file on disk, and then wait for the file change
5372 // to be detected by the worktree, so that the buffer starts reloading.
5373 fs.save(
5374 path!("/dir/file1").as_ref(),
5375 &"the first contents".into(),
5376 Default::default(),
5377 )
5378 .await
5379 .unwrap();
5380 worktree.next_event(cx).await;
5381
5382 // Change the buffer's file again. Depending on the random seed, the
5383 // previous file change may still be in progress.
5384 fs.save(
5385 path!("/dir/file1").as_ref(),
5386 &"the second contents".into(),
5387 Default::default(),
5388 )
5389 .await
5390 .unwrap();
5391 worktree.next_event(cx).await;
5392
5393 cx.executor().run_until_parked();
5394 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5395 buffer.read_with(cx, |buffer, _| {
5396 assert_eq!(buffer.text(), on_disk_text);
5397 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5398 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5399 });
5400}
5401
5402#[gpui::test(iterations = 30)]
5403async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5404 init_test(cx);
5405
5406 let fs = FakeFs::new(cx.executor());
5407 fs.insert_tree(
5408 path!("/dir"),
5409 json!({
5410 "file1": "the original contents",
5411 }),
5412 )
5413 .await;
5414
5415 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5416 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5417 let buffer = project
5418 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5419 .await
5420 .unwrap();
5421
5422 // Change the buffer's file on disk, and then wait for the file change
5423 // to be detected by the worktree, so that the buffer starts reloading.
5424 fs.save(
5425 path!("/dir/file1").as_ref(),
5426 &"the first contents".into(),
5427 Default::default(),
5428 )
5429 .await
5430 .unwrap();
5431 worktree.next_event(cx).await;
5432
5433 cx.executor()
5434 .spawn(cx.executor().simulate_random_delay())
5435 .await;
5436
5437 // Perform a noop edit, causing the buffer's version to increase.
5438 buffer.update(cx, |buffer, cx| {
5439 buffer.edit([(0..0, " ")], None, cx);
5440 buffer.undo(cx);
5441 });
5442
5443 cx.executor().run_until_parked();
5444 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5445 buffer.read_with(cx, |buffer, _| {
5446 let buffer_text = buffer.text();
5447 if buffer_text == on_disk_text {
5448 assert!(
5449 !buffer.is_dirty() && !buffer.has_conflict(),
5450 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5451 );
5452 }
5453 // If the file change occurred while the buffer was processing the first
5454 // change, the buffer will be in a conflicting state.
5455 else {
5456 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5457 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5458 }
5459 });
5460}
5461
5462#[gpui::test]
5463async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5464 init_test(cx);
5465
5466 let fs = FakeFs::new(cx.executor());
5467 fs.insert_tree(
5468 path!("/dir"),
5469 json!({
5470 "file1": "the old contents",
5471 }),
5472 )
5473 .await;
5474
5475 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5476 let buffer = project
5477 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5478 .await
5479 .unwrap();
5480 buffer.update(cx, |buffer, cx| {
5481 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5482 });
5483
5484 project
5485 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5486 .await
5487 .unwrap();
5488
5489 let new_text = fs
5490 .load(Path::new(path!("/dir/file1")))
5491 .await
5492 .unwrap()
5493 .replace("\r\n", "\n");
5494 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5495}
5496
5497#[gpui::test]
5498async fn test_save_as(cx: &mut gpui::TestAppContext) {
5499 init_test(cx);
5500
5501 let fs = FakeFs::new(cx.executor());
5502 fs.insert_tree("/dir", json!({})).await;
5503
5504 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5505
5506 let languages = project.update(cx, |project, _| project.languages().clone());
5507 languages.add(rust_lang());
5508
5509 let buffer = project.update(cx, |project, cx| {
5510 project.create_local_buffer("", None, false, cx)
5511 });
5512 buffer.update(cx, |buffer, cx| {
5513 buffer.edit([(0..0, "abc")], None, cx);
5514 assert!(buffer.is_dirty());
5515 assert!(!buffer.has_conflict());
5516 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5517 });
5518 project
5519 .update(cx, |project, cx| {
5520 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5521 let path = ProjectPath {
5522 worktree_id,
5523 path: rel_path("file1.rs").into(),
5524 };
5525 project.save_buffer_as(buffer.clone(), path, cx)
5526 })
5527 .await
5528 .unwrap();
5529 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5530
5531 cx.executor().run_until_parked();
5532 buffer.update(cx, |buffer, cx| {
5533 assert_eq!(
5534 buffer.file().unwrap().full_path(cx),
5535 Path::new("dir/file1.rs")
5536 );
5537 assert!(!buffer.is_dirty());
5538 assert!(!buffer.has_conflict());
5539 assert_eq!(buffer.language().unwrap().name(), "Rust");
5540 });
5541
5542 let opened_buffer = project
5543 .update(cx, |project, cx| {
5544 project.open_local_buffer("/dir/file1.rs", cx)
5545 })
5546 .await
5547 .unwrap();
5548 assert_eq!(opened_buffer, buffer);
5549}
5550
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer ("data about a" -> "data about b") so the save-as
    // below writes modified contents to the new path.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5617
// Verifies that, on a real file system, entry ids and open buffers track
// renames and deletions, and that a remote replica of the worktree converges
// to the same state after the streamed updates are applied.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching requires blocking on actual filesystem events.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path, panicking if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all of the renames/deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames (including a parent-dir rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5785
// Regression test: after a directory is deleted and then re-created, the
// file watcher must still deliver events for new children of the re-created
// directory. Linux-only because it exercises the platform watcher backend.
#[cfg(target_os = "linux")]
#[gpui::test(retries = 5)]
async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS watching requires blocking on actual filesystem events.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({}));
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    tree.flush_fs_events(cx).await;

    // Create the directory and confirm the worktree picks it up.
    let repro_dir = dir.path().join("repro");
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // Delete it again; the entry must disappear.
    std::fs::remove_dir_all(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
    });

    // Re-create the same directory path.
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // The crux of the test: a file created inside the re-created directory
    // must still generate an event and appear in the worktree.
    std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(
            tree.read(cx)
                .entry_for_path(rel_path("repro/repro-marker"))
                .is_some()
        );
    });
}
5831
// Verifies that renaming a directory through the project keeps both the
// directory's and its children's entry ids stable, and leaves an open child
// buffer clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path, panicking if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` -> `b` via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5885
5886#[gpui::test]
5887async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5888 init_test(cx);
5889
5890 let fs = FakeFs::new(cx.executor());
5891 fs.insert_tree(
5892 "/dir",
5893 json!({
5894 "a.txt": "a-contents",
5895 "b.txt": "b-contents",
5896 }),
5897 )
5898 .await;
5899
5900 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5901
5902 // Spawn multiple tasks to open paths, repeating some paths.
5903 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5904 (
5905 p.open_local_buffer("/dir/a.txt", cx),
5906 p.open_local_buffer("/dir/b.txt", cx),
5907 p.open_local_buffer("/dir/a.txt", cx),
5908 )
5909 });
5910
5911 let buffer_a_1 = buffer_a_1.await.unwrap();
5912 let buffer_a_2 = buffer_a_2.await.unwrap();
5913 let buffer_b = buffer_b.await.unwrap();
5914 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5915 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5916
5917 // There is only one buffer per path.
5918 let buffer_a_id = buffer_a_1.entity_id();
5919 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5920
5921 // Open the same path again while it is still open.
5922 drop(buffer_a_1);
5923 let buffer_a_3 = project
5924 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5925 .await
5926 .unwrap();
5927
5928 // There's still only one buffer per path.
5929 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5930}
5931
5932#[gpui::test]
5933async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5934 init_test(cx);
5935
5936 let fs = FakeFs::new(cx.executor());
5937 fs.insert_tree(
5938 path!("/dir"),
5939 json!({
5940 "file1": "abc",
5941 "file2": "def",
5942 "file3": "ghi",
5943 }),
5944 )
5945 .await;
5946
5947 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5948
5949 let buffer1 = project
5950 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5951 .await
5952 .unwrap();
5953 let events = Arc::new(Mutex::new(Vec::new()));
5954
5955 // initially, the buffer isn't dirty.
5956 buffer1.update(cx, |buffer, cx| {
5957 cx.subscribe(&buffer1, {
5958 let events = events.clone();
5959 move |_, _, event, _| match event {
5960 BufferEvent::Operation { .. } => {}
5961 _ => events.lock().push(event.clone()),
5962 }
5963 })
5964 .detach();
5965
5966 assert!(!buffer.is_dirty());
5967 assert!(events.lock().is_empty());
5968
5969 buffer.edit([(1..2, "")], None, cx);
5970 });
5971
5972 // after the first edit, the buffer is dirty, and emits a dirtied event.
5973 buffer1.update(cx, |buffer, cx| {
5974 assert!(buffer.text() == "ac");
5975 assert!(buffer.is_dirty());
5976 assert_eq!(
5977 *events.lock(),
5978 &[
5979 language::BufferEvent::Edited { is_local: true },
5980 language::BufferEvent::DirtyChanged
5981 ]
5982 );
5983 events.lock().clear();
5984 buffer.did_save(
5985 buffer.version(),
5986 buffer.file().unwrap().disk_state().mtime(),
5987 cx,
5988 );
5989 });
5990
5991 // after saving, the buffer is not dirty, and emits a saved event.
5992 buffer1.update(cx, |buffer, cx| {
5993 assert!(!buffer.is_dirty());
5994 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5995 events.lock().clear();
5996
5997 buffer.edit([(1..1, "B")], None, cx);
5998 buffer.edit([(2..2, "D")], None, cx);
5999 });
6000
6001 // after editing again, the buffer is dirty, and emits another dirty event.
6002 buffer1.update(cx, |buffer, cx| {
6003 assert!(buffer.text() == "aBDc");
6004 assert!(buffer.is_dirty());
6005 assert_eq!(
6006 *events.lock(),
6007 &[
6008 language::BufferEvent::Edited { is_local: true },
6009 language::BufferEvent::DirtyChanged,
6010 language::BufferEvent::Edited { is_local: true },
6011 ],
6012 );
6013 events.lock().clear();
6014
6015 // After restoring the buffer to its previously-saved state,
6016 // the buffer is not considered dirty anymore.
6017 buffer.edit([(1..3, "")], None, cx);
6018 assert!(buffer.text() == "ac");
6019 assert!(!buffer.is_dirty());
6020 });
6021
6022 assert_eq!(
6023 *events.lock(),
6024 &[
6025 language::BufferEvent::Edited { is_local: true },
6026 language::BufferEvent::DirtyChanged
6027 ]
6028 );
6029
6030 // When a file is deleted, it is not considered dirty.
6031 let events = Arc::new(Mutex::new(Vec::new()));
6032 let buffer2 = project
6033 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6034 .await
6035 .unwrap();
6036 buffer2.update(cx, |_, cx| {
6037 cx.subscribe(&buffer2, {
6038 let events = events.clone();
6039 move |_, _, event, _| match event {
6040 BufferEvent::Operation { .. } => {}
6041 _ => events.lock().push(event.clone()),
6042 }
6043 })
6044 .detach();
6045 });
6046
6047 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
6048 .await
6049 .unwrap();
6050 cx.executor().run_until_parked();
6051 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
6052 assert_eq!(
6053 mem::take(&mut *events.lock()),
6054 &[language::BufferEvent::FileHandleChanged]
6055 );
6056
6057 // Buffer becomes dirty when edited.
6058 buffer2.update(cx, |buffer, cx| {
6059 buffer.edit([(2..3, "")], None, cx);
6060 assert_eq!(buffer.is_dirty(), true);
6061 });
6062 assert_eq!(
6063 mem::take(&mut *events.lock()),
6064 &[
6065 language::BufferEvent::Edited { is_local: true },
6066 language::BufferEvent::DirtyChanged
6067 ]
6068 );
6069
6070 // Buffer becomes clean again when all of its content is removed, because
6071 // the file was deleted.
6072 buffer2.update(cx, |buffer, cx| {
6073 buffer.edit([(0..2, "")], None, cx);
6074 assert_eq!(buffer.is_empty(), true);
6075 assert_eq!(buffer.is_dirty(), false);
6076 });
6077 assert_eq!(
6078 *events.lock(),
6079 &[
6080 language::BufferEvent::Edited { is_local: true },
6081 language::BufferEvent::DirtyChanged
6082 ]
6083 );
6084
6085 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6086 let events = Arc::new(Mutex::new(Vec::new()));
6087 let buffer3 = project
6088 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6089 .await
6090 .unwrap();
6091 buffer3.update(cx, |_, cx| {
6092 cx.subscribe(&buffer3, {
6093 let events = events.clone();
6094 move |_, _, event, _| match event {
6095 BufferEvent::Operation { .. } => {}
6096 _ => events.lock().push(event.clone()),
6097 }
6098 })
6099 .detach();
6100 });
6101
6102 buffer3.update(cx, |buffer, cx| {
6103 buffer.edit([(0..0, "x")], None, cx);
6104 });
6105 events.lock().clear();
6106 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6107 .await
6108 .unwrap();
6109 cx.executor().run_until_parked();
6110 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6111 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6112}
6113
// Verifies that when disk contents change while a buffer is dirty (producing
// a conflict), undoing the user's edit back to the saved state triggers a
// reload from disk instead of leaving stale text in the buffer.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text but now reports a conflict.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
6182
// Verifies how a buffer reacts to disk changes: a clean buffer silently
// reloads (preserving anchors via a diff of old/new contents), while a dirty
// buffer keeps its text and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers in the text denote offsets whose anchors we track
    // across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset in the original text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff-based reload to the corresponding
        // offsets in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6265
// Verifies line-ending handling: buffers normalize text to `\n` internally
// while remembering the file's ending style, track changes to that style on
// disk, and write the remembered style back out on save.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // Buffer text is always normalized to `\n`; the original ending style is
    // recorded on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
6327
// Verifies that pushed LSP diagnostics whose `related_information` entries
// reference each other are grouped: each group has exactly one primary
// diagnostic plus its hints, all sharing a `group_id`, and can be retrieved
// together via `diagnostic_group`.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publish-diagnostics payload containing two groups:
    // "error 1" (warning, one hint) and "error 2" (error, two hints). The
    // hints point back at their primary via `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by range: each entry carries its group id and
    // whether it is the group's primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6587
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation flow when a worktree entry is renamed:
    // the `workspace/willRenameFiles` request is sent first (and its returned
    // `WorkspaceEdit` is resolved), followed by the `workspace/didRenameFiles`
    // notification — but only because the server registered file-operation
    // filters that match the renamed path.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: all `.rs` files plus all folders.
    // The rename below ("one.rs" -> "three.rs") matches the first filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both `didRename` and `willRename` capabilities so the project
    // performs the full request + notification sequence.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the fake language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; it stays pending until the server answers
    // `willRenameFiles` below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Edit the server will return from `willRenameFiles`; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed out by the handler so we can later assert the
    // request was actually made (set exactly once).
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the single pending rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive `didRenameFiles`
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6724
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename over LSP: `prepare_rename` resolves the range
    // of the symbol under the cursor, then `perform_rename` applies the
    // multi-file `WorkspaceEdit` the server returns, producing a transaction
    // that touches both affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support including `prepareRename`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Phase 1: prepare_rename at offset 7 (inside "ONE") should yield the
    // symbol's range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    // "ONE" occupies offsets 6..9 in `const ONE: usize = 1;`.
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename; the server responds with edits in both
    // `one.rs` (the definition) and `two.rs` (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction should cover both buffers, each fully edited.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6865
6866#[gpui::test]
6867async fn test_search(cx: &mut gpui::TestAppContext) {
6868 init_test(cx);
6869
6870 let fs = FakeFs::new(cx.executor());
6871 fs.insert_tree(
6872 path!("/dir"),
6873 json!({
6874 "one.rs": "const ONE: usize = 1;",
6875 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6876 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6877 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6878 }),
6879 )
6880 .await;
6881 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6882 assert_eq!(
6883 search(
6884 &project,
6885 SearchQuery::text(
6886 "TWO",
6887 false,
6888 true,
6889 false,
6890 Default::default(),
6891 Default::default(),
6892 false,
6893 None
6894 )
6895 .unwrap(),
6896 cx
6897 )
6898 .await
6899 .unwrap(),
6900 HashMap::from_iter([
6901 (path!("dir/two.rs").to_string(), vec![6..9]),
6902 (path!("dir/three.rs").to_string(), vec![37..40])
6903 ])
6904 );
6905
6906 let buffer_4 = project
6907 .update(cx, |project, cx| {
6908 project.open_local_buffer(path!("/dir/four.rs"), cx)
6909 })
6910 .await
6911 .unwrap();
6912 buffer_4.update(cx, |buffer, cx| {
6913 let text = "two::TWO";
6914 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6915 });
6916
6917 assert_eq!(
6918 search(
6919 &project,
6920 SearchQuery::text(
6921 "TWO",
6922 false,
6923 true,
6924 false,
6925 Default::default(),
6926 Default::default(),
6927 false,
6928 None,
6929 )
6930 .unwrap(),
6931 cx
6932 )
6933 .await
6934 .unwrap(),
6935 HashMap::from_iter([
6936 (path!("dir/two.rs").to_string(), vec![6..9]),
6937 (path!("dir/three.rs").to_string(), vec![37..40]),
6938 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6939 ])
6940 );
6941}
6942
6943#[gpui::test]
6944async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6945 init_test(cx);
6946
6947 let search_query = "file";
6948
6949 let fs = FakeFs::new(cx.executor());
6950 fs.insert_tree(
6951 path!("/dir"),
6952 json!({
6953 "one.rs": r#"// Rust file one"#,
6954 "one.ts": r#"// TypeScript file one"#,
6955 "two.rs": r#"// Rust file two"#,
6956 "two.ts": r#"// TypeScript file two"#,
6957 }),
6958 )
6959 .await;
6960 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6961
6962 assert!(
6963 search(
6964 &project,
6965 SearchQuery::text(
6966 search_query,
6967 false,
6968 true,
6969 false,
6970 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6971 Default::default(),
6972 false,
6973 None
6974 )
6975 .unwrap(),
6976 cx
6977 )
6978 .await
6979 .unwrap()
6980 .is_empty(),
6981 "If no inclusions match, no files should be returned"
6982 );
6983
6984 assert_eq!(
6985 search(
6986 &project,
6987 SearchQuery::text(
6988 search_query,
6989 false,
6990 true,
6991 false,
6992 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6993 Default::default(),
6994 false,
6995 None
6996 )
6997 .unwrap(),
6998 cx
6999 )
7000 .await
7001 .unwrap(),
7002 HashMap::from_iter([
7003 (path!("dir/one.rs").to_string(), vec![8..12]),
7004 (path!("dir/two.rs").to_string(), vec![8..12]),
7005 ]),
7006 "Rust only search should give only Rust files"
7007 );
7008
7009 assert_eq!(
7010 search(
7011 &project,
7012 SearchQuery::text(
7013 search_query,
7014 false,
7015 true,
7016 false,
7017 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7018 .unwrap(),
7019 Default::default(),
7020 false,
7021 None,
7022 )
7023 .unwrap(),
7024 cx
7025 )
7026 .await
7027 .unwrap(),
7028 HashMap::from_iter([
7029 (path!("dir/one.ts").to_string(), vec![14..18]),
7030 (path!("dir/two.ts").to_string(), vec![14..18]),
7031 ]),
7032 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
7033 );
7034
7035 assert_eq!(
7036 search(
7037 &project,
7038 SearchQuery::text(
7039 search_query,
7040 false,
7041 true,
7042 false,
7043 PathMatcher::new(
7044 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7045 PathStyle::local()
7046 )
7047 .unwrap(),
7048 Default::default(),
7049 false,
7050 None,
7051 )
7052 .unwrap(),
7053 cx
7054 )
7055 .await
7056 .unwrap(),
7057 HashMap::from_iter([
7058 (path!("dir/two.ts").to_string(), vec![14..18]),
7059 (path!("dir/one.rs").to_string(), vec![8..12]),
7060 (path!("dir/one.ts").to_string(), vec![14..18]),
7061 (path!("dir/two.rs").to_string(), vec![8..12]),
7062 ]),
7063 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
7064 );
7065}
7066
7067#[gpui::test]
7068async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
7069 init_test(cx);
7070
7071 let search_query = "file";
7072
7073 let fs = FakeFs::new(cx.executor());
7074 fs.insert_tree(
7075 path!("/dir"),
7076 json!({
7077 "one.rs": r#"// Rust file one"#,
7078 "one.ts": r#"// TypeScript file one"#,
7079 "two.rs": r#"// Rust file two"#,
7080 "two.ts": r#"// TypeScript file two"#,
7081 }),
7082 )
7083 .await;
7084 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7085
7086 assert_eq!(
7087 search(
7088 &project,
7089 SearchQuery::text(
7090 search_query,
7091 false,
7092 true,
7093 false,
7094 Default::default(),
7095 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7096 false,
7097 None,
7098 )
7099 .unwrap(),
7100 cx
7101 )
7102 .await
7103 .unwrap(),
7104 HashMap::from_iter([
7105 (path!("dir/one.rs").to_string(), vec![8..12]),
7106 (path!("dir/one.ts").to_string(), vec![14..18]),
7107 (path!("dir/two.rs").to_string(), vec![8..12]),
7108 (path!("dir/two.ts").to_string(), vec![14..18]),
7109 ]),
7110 "If no exclusions match, all files should be returned"
7111 );
7112
7113 assert_eq!(
7114 search(
7115 &project,
7116 SearchQuery::text(
7117 search_query,
7118 false,
7119 true,
7120 false,
7121 Default::default(),
7122 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7123 false,
7124 None,
7125 )
7126 .unwrap(),
7127 cx
7128 )
7129 .await
7130 .unwrap(),
7131 HashMap::from_iter([
7132 (path!("dir/one.ts").to_string(), vec![14..18]),
7133 (path!("dir/two.ts").to_string(), vec![14..18]),
7134 ]),
7135 "Rust exclusion search should give only TypeScript files"
7136 );
7137
7138 assert_eq!(
7139 search(
7140 &project,
7141 SearchQuery::text(
7142 search_query,
7143 false,
7144 true,
7145 false,
7146 Default::default(),
7147 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7148 .unwrap(),
7149 false,
7150 None,
7151 )
7152 .unwrap(),
7153 cx
7154 )
7155 .await
7156 .unwrap(),
7157 HashMap::from_iter([
7158 (path!("dir/one.rs").to_string(), vec![8..12]),
7159 (path!("dir/two.rs").to_string(), vec![8..12]),
7160 ]),
7161 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7162 );
7163
7164 assert!(
7165 search(
7166 &project,
7167 SearchQuery::text(
7168 search_query,
7169 false,
7170 true,
7171 false,
7172 Default::default(),
7173 PathMatcher::new(
7174 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7175 PathStyle::local(),
7176 )
7177 .unwrap(),
7178 false,
7179 None,
7180 )
7181 .unwrap(),
7182 cx
7183 )
7184 .await
7185 .unwrap()
7186 .is_empty(),
7187 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7188 );
7189}
7190
7191#[gpui::test]
7192async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7193 init_test(cx);
7194
7195 let search_query = "file";
7196
7197 let fs = FakeFs::new(cx.executor());
7198 fs.insert_tree(
7199 path!("/dir"),
7200 json!({
7201 "one.rs": r#"// Rust file one"#,
7202 "one.ts": r#"// TypeScript file one"#,
7203 "two.rs": r#"// Rust file two"#,
7204 "two.ts": r#"// TypeScript file two"#,
7205 }),
7206 )
7207 .await;
7208
7209 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7210 let path_style = PathStyle::local();
7211 let _buffer = project.update(cx, |project, cx| {
7212 project.create_local_buffer("file", None, false, cx)
7213 });
7214
7215 assert_eq!(
7216 search(
7217 &project,
7218 SearchQuery::text(
7219 search_query,
7220 false,
7221 true,
7222 false,
7223 Default::default(),
7224 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7225 false,
7226 None,
7227 )
7228 .unwrap(),
7229 cx
7230 )
7231 .await
7232 .unwrap(),
7233 HashMap::from_iter([
7234 (path!("dir/one.rs").to_string(), vec![8..12]),
7235 (path!("dir/one.ts").to_string(), vec![14..18]),
7236 (path!("dir/two.rs").to_string(), vec![8..12]),
7237 (path!("dir/two.ts").to_string(), vec![14..18]),
7238 ]),
7239 "If no exclusions match, all files should be returned"
7240 );
7241
7242 assert_eq!(
7243 search(
7244 &project,
7245 SearchQuery::text(
7246 search_query,
7247 false,
7248 true,
7249 false,
7250 Default::default(),
7251 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7252 false,
7253 None,
7254 )
7255 .unwrap(),
7256 cx
7257 )
7258 .await
7259 .unwrap(),
7260 HashMap::from_iter([
7261 (path!("dir/one.ts").to_string(), vec![14..18]),
7262 (path!("dir/two.ts").to_string(), vec![14..18]),
7263 ]),
7264 "Rust exclusion search should give only TypeScript files"
7265 );
7266
7267 assert_eq!(
7268 search(
7269 &project,
7270 SearchQuery::text(
7271 search_query,
7272 false,
7273 true,
7274 false,
7275 Default::default(),
7276 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7277 false,
7278 None,
7279 )
7280 .unwrap(),
7281 cx
7282 )
7283 .await
7284 .unwrap(),
7285 HashMap::from_iter([
7286 (path!("dir/one.rs").to_string(), vec![8..12]),
7287 (path!("dir/two.rs").to_string(), vec![8..12]),
7288 ]),
7289 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7290 );
7291
7292 assert!(
7293 search(
7294 &project,
7295 SearchQuery::text(
7296 search_query,
7297 false,
7298 true,
7299 false,
7300 Default::default(),
7301 PathMatcher::new(
7302 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7303 PathStyle::local(),
7304 )
7305 .unwrap(),
7306 false,
7307 None,
7308 )
7309 .unwrap(),
7310 cx
7311 )
7312 .await
7313 .unwrap()
7314 .is_empty(),
7315 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7316 );
7317}
7318
7319#[gpui::test]
7320async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7321 init_test(cx);
7322
7323 let search_query = "file";
7324
7325 let fs = FakeFs::new(cx.executor());
7326 fs.insert_tree(
7327 path!("/dir"),
7328 json!({
7329 "one.rs": r#"// Rust file one"#,
7330 "one.ts": r#"// TypeScript file one"#,
7331 "two.rs": r#"// Rust file two"#,
7332 "two.ts": r#"// TypeScript file two"#,
7333 }),
7334 )
7335 .await;
7336 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7337 assert!(
7338 search(
7339 &project,
7340 SearchQuery::text(
7341 search_query,
7342 false,
7343 true,
7344 false,
7345 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7346 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7347 false,
7348 None,
7349 )
7350 .unwrap(),
7351 cx
7352 )
7353 .await
7354 .unwrap()
7355 .is_empty(),
7356 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7357 );
7358
7359 assert!(
7360 search(
7361 &project,
7362 SearchQuery::text(
7363 search_query,
7364 false,
7365 true,
7366 false,
7367 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7368 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7369 false,
7370 None,
7371 )
7372 .unwrap(),
7373 cx
7374 )
7375 .await
7376 .unwrap()
7377 .is_empty(),
7378 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7379 );
7380
7381 assert!(
7382 search(
7383 &project,
7384 SearchQuery::text(
7385 search_query,
7386 false,
7387 true,
7388 false,
7389 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7390 .unwrap(),
7391 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7392 .unwrap(),
7393 false,
7394 None,
7395 )
7396 .unwrap(),
7397 cx
7398 )
7399 .await
7400 .unwrap()
7401 .is_empty(),
7402 "Non-matching inclusions and exclusions should not change that."
7403 );
7404
7405 assert_eq!(
7406 search(
7407 &project,
7408 SearchQuery::text(
7409 search_query,
7410 false,
7411 true,
7412 false,
7413 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7414 .unwrap(),
7415 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7416 .unwrap(),
7417 false,
7418 None,
7419 )
7420 .unwrap(),
7421 cx
7422 )
7423 .await
7424 .unwrap(),
7425 HashMap::from_iter([
7426 (path!("dir/one.ts").to_string(), vec![14..18]),
7427 (path!("dir/two.ts").to_string(), vec![14..18]),
7428 ]),
7429 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7430 );
7431}
7432
7433#[gpui::test]
7434async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7435 init_test(cx);
7436
7437 let fs = FakeFs::new(cx.executor());
7438 fs.insert_tree(
7439 path!("/worktree-a"),
7440 json!({
7441 "haystack.rs": r#"// NEEDLE"#,
7442 "haystack.ts": r#"// NEEDLE"#,
7443 }),
7444 )
7445 .await;
7446 fs.insert_tree(
7447 path!("/worktree-b"),
7448 json!({
7449 "haystack.rs": r#"// NEEDLE"#,
7450 "haystack.ts": r#"// NEEDLE"#,
7451 }),
7452 )
7453 .await;
7454
7455 let path_style = PathStyle::local();
7456 let project = Project::test(
7457 fs.clone(),
7458 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7459 cx,
7460 )
7461 .await;
7462
7463 assert_eq!(
7464 search(
7465 &project,
7466 SearchQuery::text(
7467 "NEEDLE",
7468 false,
7469 true,
7470 false,
7471 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7472 Default::default(),
7473 true,
7474 None,
7475 )
7476 .unwrap(),
7477 cx
7478 )
7479 .await
7480 .unwrap(),
7481 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7482 "should only return results from included worktree"
7483 );
7484 assert_eq!(
7485 search(
7486 &project,
7487 SearchQuery::text(
7488 "NEEDLE",
7489 false,
7490 true,
7491 false,
7492 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7493 Default::default(),
7494 true,
7495 None,
7496 )
7497 .unwrap(),
7498 cx
7499 )
7500 .await
7501 .unwrap(),
7502 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7503 "should only return results from included worktree"
7504 );
7505
7506 assert_eq!(
7507 search(
7508 &project,
7509 SearchQuery::text(
7510 "NEEDLE",
7511 false,
7512 true,
7513 false,
7514 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7515 Default::default(),
7516 false,
7517 None,
7518 )
7519 .unwrap(),
7520 cx
7521 )
7522 .await
7523 .unwrap(),
7524 HashMap::from_iter([
7525 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7526 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7527 ]),
7528 "should return results from both worktrees"
7529 );
7530}
7531
7532#[gpui::test]
7533async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7534 init_test(cx);
7535
7536 let fs = FakeFs::new(cx.background_executor.clone());
7537 fs.insert_tree(
7538 path!("/dir"),
7539 json!({
7540 ".git": {},
7541 ".gitignore": "**/target\n/node_modules\n",
7542 "target": {
7543 "index.txt": "index_key:index_value"
7544 },
7545 "node_modules": {
7546 "eslint": {
7547 "index.ts": "const eslint_key = 'eslint value'",
7548 "package.json": r#"{ "some_key": "some value" }"#,
7549 },
7550 "prettier": {
7551 "index.ts": "const prettier_key = 'prettier value'",
7552 "package.json": r#"{ "other_key": "other value" }"#,
7553 },
7554 },
7555 "package.json": r#"{ "main_key": "main value" }"#,
7556 }),
7557 )
7558 .await;
7559 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7560
7561 let query = "key";
7562 assert_eq!(
7563 search(
7564 &project,
7565 SearchQuery::text(
7566 query,
7567 false,
7568 false,
7569 false,
7570 Default::default(),
7571 Default::default(),
7572 false,
7573 None,
7574 )
7575 .unwrap(),
7576 cx
7577 )
7578 .await
7579 .unwrap(),
7580 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7581 "Only one non-ignored file should have the query"
7582 );
7583
7584 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7585 let path_style = PathStyle::local();
7586 assert_eq!(
7587 search(
7588 &project,
7589 SearchQuery::text(
7590 query,
7591 false,
7592 false,
7593 true,
7594 Default::default(),
7595 Default::default(),
7596 false,
7597 None,
7598 )
7599 .unwrap(),
7600 cx
7601 )
7602 .await
7603 .unwrap(),
7604 HashMap::from_iter([
7605 (path!("dir/package.json").to_string(), vec![8..11]),
7606 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7607 (
7608 path!("dir/node_modules/prettier/package.json").to_string(),
7609 vec![9..12]
7610 ),
7611 (
7612 path!("dir/node_modules/prettier/index.ts").to_string(),
7613 vec![15..18]
7614 ),
7615 (
7616 path!("dir/node_modules/eslint/index.ts").to_string(),
7617 vec![13..16]
7618 ),
7619 (
7620 path!("dir/node_modules/eslint/package.json").to_string(),
7621 vec![8..11]
7622 ),
7623 ]),
7624 "Unrestricted search with ignored directories should find every file with the query"
7625 );
7626
7627 let files_to_include =
7628 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7629 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7630 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7631 assert_eq!(
7632 search(
7633 &project,
7634 SearchQuery::text(
7635 query,
7636 false,
7637 false,
7638 true,
7639 files_to_include,
7640 files_to_exclude,
7641 false,
7642 None,
7643 )
7644 .unwrap(),
7645 cx
7646 )
7647 .await
7648 .unwrap(),
7649 HashMap::from_iter([(
7650 path!("dir/node_modules/prettier/package.json").to_string(),
7651 vec![9..12]
7652 )]),
7653 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7654 );
7655}
7656
7657#[gpui::test]
7658async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
7659 init_test(cx);
7660
7661 let fs = FakeFs::new(cx.executor());
7662 fs.insert_tree(
7663 path!("/dir"),
7664 json!({
7665 "one.rs": "// ПРИВЕТ? привет!",
7666 "two.rs": "// ПРИВЕТ.",
7667 "three.rs": "// привет",
7668 }),
7669 )
7670 .await;
7671 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7672 let unicode_case_sensitive_query = SearchQuery::text(
7673 "привет",
7674 false,
7675 true,
7676 false,
7677 Default::default(),
7678 Default::default(),
7679 false,
7680 None,
7681 );
7682 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
7683 assert_eq!(
7684 search(&project, unicode_case_sensitive_query.unwrap(), cx)
7685 .await
7686 .unwrap(),
7687 HashMap::from_iter([
7688 (path!("dir/one.rs").to_string(), vec![17..29]),
7689 (path!("dir/three.rs").to_string(), vec![3..15]),
7690 ])
7691 );
7692
7693 let unicode_case_insensitive_query = SearchQuery::text(
7694 "привет",
7695 false,
7696 false,
7697 false,
7698 Default::default(),
7699 Default::default(),
7700 false,
7701 None,
7702 );
7703 assert_matches!(
7704 unicode_case_insensitive_query,
7705 Ok(SearchQuery::Regex { .. })
7706 );
7707 assert_eq!(
7708 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7709 .await
7710 .unwrap(),
7711 HashMap::from_iter([
7712 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7713 (path!("dir/two.rs").to_string(), vec![3..15]),
7714 (path!("dir/three.rs").to_string(), vec![3..15]),
7715 ])
7716 );
7717
7718 assert_eq!(
7719 search(
7720 &project,
7721 SearchQuery::text(
7722 "привет.",
7723 false,
7724 false,
7725 false,
7726 Default::default(),
7727 Default::default(),
7728 false,
7729 None,
7730 )
7731 .unwrap(),
7732 cx
7733 )
7734 .await
7735 .unwrap(),
7736 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7737 );
7738}
7739
7740#[gpui::test]
7741async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7742 init_test(cx);
7743
7744 let fs = FakeFs::new(cx.executor());
7745 fs.insert_tree(
7746 "/one/two",
7747 json!({
7748 "three": {
7749 "a.txt": "",
7750 "four": {}
7751 },
7752 "c.rs": ""
7753 }),
7754 )
7755 .await;
7756
7757 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7758 project
7759 .update(cx, |project, cx| {
7760 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7761 project.create_entry((id, rel_path("b..")), true, cx)
7762 })
7763 .await
7764 .unwrap()
7765 .into_included()
7766 .unwrap();
7767
7768 assert_eq!(
7769 fs.paths(true),
7770 vec![
7771 PathBuf::from(path!("/")),
7772 PathBuf::from(path!("/one")),
7773 PathBuf::from(path!("/one/two")),
7774 PathBuf::from(path!("/one/two/c.rs")),
7775 PathBuf::from(path!("/one/two/three")),
7776 PathBuf::from(path!("/one/two/three/a.txt")),
7777 PathBuf::from(path!("/one/two/three/b..")),
7778 PathBuf::from(path!("/one/two/three/four")),
7779 ]
7780 );
7781}
7782
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // When several language servers are attached to the same buffer, a hover
    // request is fanned out to every server that advertises hover support;
    // empty responses are dropped and a server without the capability is never
    // queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers: two answer hovers, one answers with `None`, and one has no
    // hover capability (must not be queried).
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four fake servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by name, so we
    // can later wait for every expected request to fire.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with a real hover labeled by server name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried, but returns no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Has no hover capability, so this handler must never run.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Fire the hover and wait until all capable servers received the request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content should contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7937
7938#[gpui::test]
7939async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7940 init_test(cx);
7941
7942 let fs = FakeFs::new(cx.executor());
7943 fs.insert_tree(
7944 path!("/dir"),
7945 json!({
7946 "a.ts": "a",
7947 }),
7948 )
7949 .await;
7950
7951 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7952
7953 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7954 language_registry.add(typescript_lang());
7955 let mut fake_language_servers = language_registry.register_fake_lsp(
7956 "TypeScript",
7957 FakeLspAdapter {
7958 capabilities: lsp::ServerCapabilities {
7959 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7960 ..lsp::ServerCapabilities::default()
7961 },
7962 ..FakeLspAdapter::default()
7963 },
7964 );
7965
7966 let (buffer, _handle) = project
7967 .update(cx, |p, cx| {
7968 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7969 })
7970 .await
7971 .unwrap();
7972 cx.executor().run_until_parked();
7973
7974 let fake_server = fake_language_servers
7975 .next()
7976 .await
7977 .expect("failed to get the language server");
7978
7979 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7980 move |_, _| async move {
7981 Ok(Some(lsp::Hover {
7982 contents: lsp::HoverContents::Array(vec![
7983 lsp::MarkedString::String("".to_string()),
7984 lsp::MarkedString::String(" ".to_string()),
7985 lsp::MarkedString::String("\n\n\n".to_string()),
7986 ]),
7987 range: None,
7988 }))
7989 },
7990 );
7991
7992 let hover_task = project.update(cx, |project, cx| {
7993 project.hover(&buffer, Point::new(0, 0), cx)
7994 });
7995 let () = request_handled
7996 .next()
7997 .await
7998 .expect("All hover requests should have been triggered");
7999 assert_eq!(
8000 Vec::<String>::new(),
8001 hover_task
8002 .await
8003 .into_iter()
8004 .flatten()
8005 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
8006 .sorted()
8007 .collect::<Vec<_>>(),
8008 "Empty hover parts should be ignored"
8009 );
8010}
8011
8012#[gpui::test]
8013async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
8014 init_test(cx);
8015
8016 let fs = FakeFs::new(cx.executor());
8017 fs.insert_tree(
8018 path!("/dir"),
8019 json!({
8020 "a.ts": "a",
8021 }),
8022 )
8023 .await;
8024
8025 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8026
8027 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8028 language_registry.add(typescript_lang());
8029 let mut fake_language_servers = language_registry.register_fake_lsp(
8030 "TypeScript",
8031 FakeLspAdapter {
8032 capabilities: lsp::ServerCapabilities {
8033 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8034 ..lsp::ServerCapabilities::default()
8035 },
8036 ..FakeLspAdapter::default()
8037 },
8038 );
8039
8040 let (buffer, _handle) = project
8041 .update(cx, |p, cx| {
8042 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8043 })
8044 .await
8045 .unwrap();
8046 cx.executor().run_until_parked();
8047
8048 let fake_server = fake_language_servers
8049 .next()
8050 .await
8051 .expect("failed to get the language server");
8052
8053 let mut request_handled = fake_server
8054 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
8055 Ok(Some(vec![
8056 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8057 title: "organize imports".to_string(),
8058 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
8059 ..lsp::CodeAction::default()
8060 }),
8061 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8062 title: "fix code".to_string(),
8063 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
8064 ..lsp::CodeAction::default()
8065 }),
8066 ]))
8067 });
8068
8069 let code_actions_task = project.update(cx, |project, cx| {
8070 project.code_actions(
8071 &buffer,
8072 0..buffer.read(cx).len(),
8073 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
8074 cx,
8075 )
8076 });
8077
8078 let () = request_handled
8079 .next()
8080 .await
8081 .expect("The code action request should have been triggered");
8082
8083 let code_actions = code_actions_task.await.unwrap().unwrap();
8084 assert_eq!(code_actions.len(), 1);
8085 assert_eq!(
8086 code_actions[0].lsp_action.action_kind(),
8087 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
8088 );
8089}
8090
8091#[gpui::test]
8092async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8093 cx: &mut gpui::TestAppContext,
8094) {
8095 init_test(cx);
8096
8097 let fs = FakeFs::new(cx.executor());
8098 fs.insert_tree(
8099 path!("/dir"),
8100 json!({
8101 "a.ts": "a",
8102 }),
8103 )
8104 .await;
8105
8106 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8107
8108 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8109 language_registry.add(typescript_lang());
8110 let mut fake_language_servers = language_registry.register_fake_lsp(
8111 "TypeScript",
8112 FakeLspAdapter {
8113 capabilities: lsp::ServerCapabilities {
8114 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8115 lsp::CodeActionOptions {
8116 code_action_kinds: Some(vec![
8117 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8118 "source.doc".into(),
8119 ]),
8120 ..lsp::CodeActionOptions::default()
8121 },
8122 )),
8123 ..lsp::ServerCapabilities::default()
8124 },
8125 ..FakeLspAdapter::default()
8126 },
8127 );
8128
8129 let (buffer, _handle) = project
8130 .update(cx, |p, cx| {
8131 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8132 })
8133 .await
8134 .unwrap();
8135 cx.executor().run_until_parked();
8136
8137 let fake_server = fake_language_servers
8138 .next()
8139 .await
8140 .expect("failed to get the language server");
8141
8142 let mut request_handled = fake_server.set_request_handler::<
8143 lsp::request::CodeActionRequest,
8144 _,
8145 _,
8146 >(move |params, _| async move {
8147 assert_eq!(
8148 params.context.only, None,
8149 "Code action requests without explicit kind filters should not send `context.only`"
8150 );
8151 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8152 lsp::CodeAction {
8153 title: "Add test".to_string(),
8154 kind: Some("source.addTest".into()),
8155 ..lsp::CodeAction::default()
8156 },
8157 )]))
8158 });
8159
8160 let code_actions_task = project.update(cx, |project, cx| {
8161 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8162 });
8163
8164 let () = request_handled
8165 .next()
8166 .await
8167 .expect("The code action request should have been triggered");
8168
8169 let code_actions = code_actions_task.await.unwrap().unwrap();
8170 assert_eq!(code_actions.len(), 1);
8171 assert_eq!(
8172 code_actions[0].lsp_action.action_kind(),
8173 Some("source.addTest".into())
8174 );
8175}
8176
8177#[gpui::test]
8178async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8179 init_test(cx);
8180
8181 let fs = FakeFs::new(cx.executor());
8182 fs.insert_tree(
8183 path!("/dir"),
8184 json!({
8185 "a.tsx": "a",
8186 }),
8187 )
8188 .await;
8189
8190 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8191
8192 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8193 language_registry.add(tsx_lang());
8194 let language_server_names = [
8195 "TypeScriptServer",
8196 "TailwindServer",
8197 "ESLintServer",
8198 "NoActionsCapabilitiesServer",
8199 ];
8200
8201 let mut language_server_rxs = [
8202 language_registry.register_fake_lsp(
8203 "tsx",
8204 FakeLspAdapter {
8205 name: language_server_names[0],
8206 capabilities: lsp::ServerCapabilities {
8207 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8208 ..lsp::ServerCapabilities::default()
8209 },
8210 ..FakeLspAdapter::default()
8211 },
8212 ),
8213 language_registry.register_fake_lsp(
8214 "tsx",
8215 FakeLspAdapter {
8216 name: language_server_names[1],
8217 capabilities: lsp::ServerCapabilities {
8218 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8219 ..lsp::ServerCapabilities::default()
8220 },
8221 ..FakeLspAdapter::default()
8222 },
8223 ),
8224 language_registry.register_fake_lsp(
8225 "tsx",
8226 FakeLspAdapter {
8227 name: language_server_names[2],
8228 capabilities: lsp::ServerCapabilities {
8229 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8230 ..lsp::ServerCapabilities::default()
8231 },
8232 ..FakeLspAdapter::default()
8233 },
8234 ),
8235 language_registry.register_fake_lsp(
8236 "tsx",
8237 FakeLspAdapter {
8238 name: language_server_names[3],
8239 capabilities: lsp::ServerCapabilities {
8240 code_action_provider: None,
8241 ..lsp::ServerCapabilities::default()
8242 },
8243 ..FakeLspAdapter::default()
8244 },
8245 ),
8246 ];
8247
8248 let (buffer, _handle) = project
8249 .update(cx, |p, cx| {
8250 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8251 })
8252 .await
8253 .unwrap();
8254 cx.executor().run_until_parked();
8255
8256 let mut servers_with_actions_requests = HashMap::default();
8257 for i in 0..language_server_names.len() {
8258 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8259 panic!(
8260 "Failed to get language server #{i} with name {}",
8261 &language_server_names[i]
8262 )
8263 });
8264 let new_server_name = new_server.server.name();
8265
8266 assert!(
8267 !servers_with_actions_requests.contains_key(&new_server_name),
8268 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8269 );
8270 match new_server_name.0.as_ref() {
8271 "TailwindServer" | "TypeScriptServer" => {
8272 servers_with_actions_requests.insert(
8273 new_server_name.clone(),
8274 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8275 move |_, _| {
8276 let name = new_server_name.clone();
8277 async move {
8278 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8279 lsp::CodeAction {
8280 title: format!("{name} code action"),
8281 ..lsp::CodeAction::default()
8282 },
8283 )]))
8284 }
8285 },
8286 ),
8287 );
8288 }
8289 "ESLintServer" => {
8290 servers_with_actions_requests.insert(
8291 new_server_name,
8292 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8293 |_, _| async move { Ok(None) },
8294 ),
8295 );
8296 }
8297 "NoActionsCapabilitiesServer" => {
8298 let _never_handled = new_server
8299 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8300 panic!(
8301 "Should not call for code actions server with no corresponding capabilities"
8302 )
8303 });
8304 }
8305 unexpected => panic!("Unexpected server name: {unexpected}"),
8306 }
8307 }
8308
8309 let code_actions_task = project.update(cx, |project, cx| {
8310 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8311 });
8312
8313 // cx.run_until_parked();
8314 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8315 |mut code_actions_request| async move {
8316 code_actions_request
8317 .next()
8318 .await
8319 .expect("All code actions requests should have been triggered")
8320 },
8321 ))
8322 .await;
8323 assert_eq!(
8324 vec!["TailwindServer code action", "TypeScriptServer code action"],
8325 code_actions_task
8326 .await
8327 .unwrap()
8328 .unwrap()
8329 .into_iter()
8330 .map(|code_action| code_action.lsp_action.title().to_owned())
8331 .sorted()
8332 .collect::<Vec<_>>(),
8333 "Should receive code actions responses from all related servers with hover capabilities"
8334 );
8335}
8336
8337#[gpui::test]
8338async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8339 init_test(cx);
8340
8341 let fs = FakeFs::new(cx.executor());
8342 fs.insert_tree(
8343 "/dir",
8344 json!({
8345 "a.rs": "let a = 1;",
8346 "b.rs": "let b = 2;",
8347 "c.rs": "let c = 2;",
8348 }),
8349 )
8350 .await;
8351
8352 let project = Project::test(
8353 fs,
8354 [
8355 "/dir/a.rs".as_ref(),
8356 "/dir/b.rs".as_ref(),
8357 "/dir/c.rs".as_ref(),
8358 ],
8359 cx,
8360 )
8361 .await;
8362
8363 // check the initial state and get the worktrees
8364 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8365 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8366 assert_eq!(worktrees.len(), 3);
8367
8368 let worktree_a = worktrees[0].read(cx);
8369 let worktree_b = worktrees[1].read(cx);
8370 let worktree_c = worktrees[2].read(cx);
8371
8372 // check they start in the right order
8373 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8374 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8375 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8376
8377 (
8378 worktrees[0].clone(),
8379 worktrees[1].clone(),
8380 worktrees[2].clone(),
8381 )
8382 });
8383
8384 // move first worktree to after the second
8385 // [a, b, c] -> [b, a, c]
8386 project
8387 .update(cx, |project, cx| {
8388 let first = worktree_a.read(cx);
8389 let second = worktree_b.read(cx);
8390 project.move_worktree(first.id(), second.id(), cx)
8391 })
8392 .expect("moving first after second");
8393
8394 // check the state after moving
8395 project.update(cx, |project, cx| {
8396 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8397 assert_eq!(worktrees.len(), 3);
8398
8399 let first = worktrees[0].read(cx);
8400 let second = worktrees[1].read(cx);
8401 let third = worktrees[2].read(cx);
8402
8403 // check they are now in the right order
8404 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8405 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8406 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8407 });
8408
8409 // move the second worktree to before the first
8410 // [b, a, c] -> [a, b, c]
8411 project
8412 .update(cx, |project, cx| {
8413 let second = worktree_a.read(cx);
8414 let first = worktree_b.read(cx);
8415 project.move_worktree(first.id(), second.id(), cx)
8416 })
8417 .expect("moving second before first");
8418
8419 // check the state after moving
8420 project.update(cx, |project, cx| {
8421 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8422 assert_eq!(worktrees.len(), 3);
8423
8424 let first = worktrees[0].read(cx);
8425 let second = worktrees[1].read(cx);
8426 let third = worktrees[2].read(cx);
8427
8428 // check they are now in the right order
8429 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8430 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8431 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8432 });
8433
8434 // move the second worktree to after the third
8435 // [a, b, c] -> [a, c, b]
8436 project
8437 .update(cx, |project, cx| {
8438 let second = worktree_b.read(cx);
8439 let third = worktree_c.read(cx);
8440 project.move_worktree(second.id(), third.id(), cx)
8441 })
8442 .expect("moving second after third");
8443
8444 // check the state after moving
8445 project.update(cx, |project, cx| {
8446 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8447 assert_eq!(worktrees.len(), 3);
8448
8449 let first = worktrees[0].read(cx);
8450 let second = worktrees[1].read(cx);
8451 let third = worktrees[2].read(cx);
8452
8453 // check they are now in the right order
8454 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8455 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8456 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8457 });
8458
8459 // move the third worktree to before the second
8460 // [a, c, b] -> [a, b, c]
8461 project
8462 .update(cx, |project, cx| {
8463 let third = worktree_c.read(cx);
8464 let second = worktree_b.read(cx);
8465 project.move_worktree(third.id(), second.id(), cx)
8466 })
8467 .expect("moving third before second");
8468
8469 // check the state after moving
8470 project.update(cx, |project, cx| {
8471 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8472 assert_eq!(worktrees.len(), 3);
8473
8474 let first = worktrees[0].read(cx);
8475 let second = worktrees[1].read(cx);
8476 let third = worktrees[2].read(cx);
8477
8478 // check they are now in the right order
8479 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8480 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8481 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8482 });
8483
8484 // move the first worktree to after the third
8485 // [a, b, c] -> [b, c, a]
8486 project
8487 .update(cx, |project, cx| {
8488 let first = worktree_a.read(cx);
8489 let third = worktree_c.read(cx);
8490 project.move_worktree(first.id(), third.id(), cx)
8491 })
8492 .expect("moving first after third");
8493
8494 // check the state after moving
8495 project.update(cx, |project, cx| {
8496 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8497 assert_eq!(worktrees.len(), 3);
8498
8499 let first = worktrees[0].read(cx);
8500 let second = worktrees[1].read(cx);
8501 let third = worktrees[2].read(cx);
8502
8503 // check they are now in the right order
8504 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8505 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8506 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8507 });
8508
8509 // move the third worktree to before the first
8510 // [b, c, a] -> [a, b, c]
8511 project
8512 .update(cx, |project, cx| {
8513 let third = worktree_a.read(cx);
8514 let first = worktree_b.read(cx);
8515 project.move_worktree(third.id(), first.id(), cx)
8516 })
8517 .expect("moving third before first");
8518
8519 // check the state after moving
8520 project.update(cx, |project, cx| {
8521 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8522 assert_eq!(worktrees.len(), 3);
8523
8524 let first = worktrees[0].read(cx);
8525 let second = worktrees[1].read(cx);
8526 let third = worktrees[2].read(cx);
8527
8528 // check they are now in the right order
8529 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8530 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8531 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8532 });
8533}
8534
8535#[gpui::test]
8536async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8537 init_test(cx);
8538
8539 let staged_contents = r#"
8540 fn main() {
8541 println!("hello world");
8542 }
8543 "#
8544 .unindent();
8545 let file_contents = r#"
8546 // print goodbye
8547 fn main() {
8548 println!("goodbye world");
8549 }
8550 "#
8551 .unindent();
8552
8553 let fs = FakeFs::new(cx.background_executor.clone());
8554 fs.insert_tree(
8555 "/dir",
8556 json!({
8557 ".git": {},
8558 "src": {
8559 "main.rs": file_contents,
8560 }
8561 }),
8562 )
8563 .await;
8564
8565 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8566
8567 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8568
8569 let buffer = project
8570 .update(cx, |project, cx| {
8571 project.open_local_buffer("/dir/src/main.rs", cx)
8572 })
8573 .await
8574 .unwrap();
8575 let unstaged_diff = project
8576 .update(cx, |project, cx| {
8577 project.open_unstaged_diff(buffer.clone(), cx)
8578 })
8579 .await
8580 .unwrap();
8581
8582 cx.run_until_parked();
8583 unstaged_diff.update(cx, |unstaged_diff, cx| {
8584 let snapshot = buffer.read(cx).snapshot();
8585 assert_hunks(
8586 unstaged_diff.snapshot(cx).hunks(&snapshot),
8587 &snapshot,
8588 &unstaged_diff.base_text_string(cx).unwrap(),
8589 &[
8590 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8591 (
8592 2..3,
8593 " println!(\"hello world\");\n",
8594 " println!(\"goodbye world\");\n",
8595 DiffHunkStatus::modified_none(),
8596 ),
8597 ],
8598 );
8599 });
8600
8601 let staged_contents = r#"
8602 // print goodbye
8603 fn main() {
8604 }
8605 "#
8606 .unindent();
8607
8608 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8609
8610 cx.run_until_parked();
8611 unstaged_diff.update(cx, |unstaged_diff, cx| {
8612 let snapshot = buffer.read(cx).snapshot();
8613 assert_hunks(
8614 unstaged_diff.snapshot(cx).hunks_intersecting_range(
8615 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
8616 &snapshot,
8617 ),
8618 &snapshot,
8619 &unstaged_diff.base_text(cx).text(),
8620 &[(
8621 2..3,
8622 "",
8623 " println!(\"goodbye world\");\n",
8624 DiffHunkStatus::added_none(),
8625 )],
8626 );
8627 });
8628}
8629
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Covers the uncommitted diff (HEAD vs working copy): how each hunk's
    // secondary (staged/unstaged) status reacts to HEAD and index updates, and
    // how a file that exists in HEAD but not on disk is reported.
    init_test(cx);

    // HEAD version of src/modification.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: additionally adds a comment line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain deletion.rs, which is absent on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is not yet in the index (HasSecondaryHunk), while
    // the println modification is staged, so it carries no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a single deletion hunk; it is still present in
    // the index, so the deletion is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the file removed from the index, the deletion hunk is fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8821
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // End-to-end coverage of staging hunks via the uncommitted diff:
    // optimistic (pending) secondary status, the transition once the index
    // write completes, rollback when the index write fails, and two staging
    // operations issued back-to-back.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index initially: "zero" deleted, "two" and "four" modified in
    // the working copy, so all three hunks start unstaged.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Observe the diff's event stream to assert on emitted change events.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write is still in flight: the staged hunk shows
        // `SecondaryHunkRemovalPending` rather than `NoSecondaryHunk`.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic state again, even though this write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9171
// Verifies that opening the uncommitted diff *before* the unstaged diff for
// the same buffer still yields correct hunk state once both loads resolve.
// Run with multiple iterations to shake out ordering races between the two
// concurrent open tasks.
#[gpui::test(iterations = 10)]
async fn test_uncommitted_diff_opened_before_unstaged_diff(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index agree ("two"); only the working copy differs ("TWO").
    let committed_contents = "one\ntwo\nthree\n";
    let file_contents = "one\nTWO\nthree\n";

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents,
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.into())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();

    // Kick off both diff opens concurrently, uncommitted first, and await them
    // together so neither completes before the other is requested.
    let uncommitted_diff_task = project.update(cx, |project, cx| {
        project.open_uncommitted_diff(buffer.clone(), cx)
    });
    let unstaged_diff_task = project.update(cx, |project, cx| {
        project.open_unstaged_diff(buffer.clone(), cx)
    });
    let (uncommitted_diff, _unstaged_diff) =
        futures::future::join(uncommitted_diff_task, unstaged_diff_task).await;
    let uncommitted_diff = uncommitted_diff.unwrap();
    let _unstaged_diff = _unstaged_diff.unwrap();

    cx.run_until_parked();

    // The single modified hunk should be unstaged (it still has a secondary
    // hunk in the index diff).
    uncommitted_diff.read_with(cx, |diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });
}
9233
// Exercises staging hunks while filesystem events are delayed: staging
// operations issued before the FS event for a previous staging operation
// arrives must not clobber each other's pending state. Pinned to seeds that
// previously reproduced an ordering bug.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified -> three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. Its write is in flight, so its secondary status
    // should be "removal pending" while the others remain untouched.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are pending; the third is still untouched.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9427
// Randomized staging/unstaging: repeatedly toggles random hunks (with random
// yields between operations) and checks that once all IO settles, every
// hunk's secondary status matches the last operation applied to it.
// The `OPERATIONS` env var overrides the default operation count.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as our model of the expected state: we record the
    // pending status we expect after each operation.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle: stage if currently unstaged, unstage otherwise.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave operations with executor progress.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After IO settles, every pending status should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9547
// Verifies that diffs work when the project's worktree root is a single file
// (not a directory): the repository lives above the worktree root, and the
// buffer's path must still resolve to the right repo entry.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Open the project rooted at the file itself, not its directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // The single modified line appears as one unstaged hunk against HEAD.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
9621
// TODO: Should we test this on Windows also?
// Uses a real git repository (not FakeFs) to verify that staging a hunk does
// not reset the file's executable bit in the index: the index entry must stay
// mode 100755 after staging a content change.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + git subprocesses require parking the executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify its contents in
    // the working copy so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage all hunks.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Inspect the staged diff with the real git binary: a mode change would
    // show up as "new mode 100644".
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry's mode bits directly.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9706
// Checks repository resolution for project paths, including nested
// repositories (`dir1/deps/dep1` inside `dir1`) and paths outside any repo,
// and that removing a `.git` directory drops the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)).
        // `c.txt` is outside both repositories; the nested repo wins for
        // paths under `dir1/deps/dep1`.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer `.git` should remove the repository mapping for
    // paths that resolved to it.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9796
// The home directory as a git repository is a special case: a project rooted
// at a subdirectory of home must NOT treat home's repo as containing its
// files, but a project rooted at home itself must.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: project rooted inside home — the home repo is ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: project rooted at home itself — the home repo applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9854
// End-to-end status test against a real git repository: verifies the cached
// status entries (and their diff stats) after the initial scan, after a
// working-copy edit, and after commits/deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + git subprocesses require parking the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear as
    // modified in the worktree.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the outstanding changes, then delete one tracked and one
    // untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
10010
// Verifies status post-processing: a file deleted in the index but present in
// HEAD and the working copy gets a combined `DA` status, and nested git
// repositories are excluded from the outer repo's statuses.
// NOTE(review): currently `#[ignore]`d — presumably flaky or known-broken;
// confirm before re-enabling.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + git subprocesses require parking the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Select the outer repository (not the nested `sub` repo).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
10075
10076#[track_caller]
10077/// We merge lhs into rhs.
10078fn merge_pending_ops_snapshots(
10079 source: Vec<pending_op::PendingOps>,
10080 mut target: Vec<pending_op::PendingOps>,
10081) -> Vec<pending_op::PendingOps> {
10082 for s_ops in source {
10083 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
10084 if ops.repo_path == s_ops.repo_path {
10085 Some(idx)
10086 } else {
10087 None
10088 }
10089 }) {
10090 let t_ops = &mut target[idx];
10091 for s_op in s_ops.ops {
10092 if let Some(op_idx) = t_ops
10093 .ops
10094 .iter()
10095 .zip(0..)
10096 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10097 {
10098 let t_op = &mut t_ops.ops[op_idx];
10099 match (s_op.job_status, t_op.job_status) {
10100 (pending_op::JobStatus::Running, _) => {}
10101 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10102 (s_st, t_st) if s_st == t_st => {}
10103 _ => unreachable!(),
10104 }
10105 } else {
10106 t_ops.ops.push(s_op);
10107 }
10108 }
10109 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10110 } else {
10111 target.push(s_ops);
10112 }
10113 }
10114 target
10115}
10116
// Verifies the pending-ops bookkeeping for alternating stage/unstage
// operations on a single untracked file: each operation is observed as
// `Running` while in flight and `Finished` afterwards, op ids increase
// monotonically, and the final cached status reflects the last (stage)
// operation.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so we
    // can assert over the full history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; incremented per operation.
    let mut id = 1u16;

    // Performs one stage/unstage and asserts the op is `Running` while the
    // task is in flight and `Finished` once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate stage/unstage five times, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history should contain all five ops, in order, all
    // finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final cached status: the formerly-untracked file is now added to the
    // index with no further worktree changes.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10281
// Verifies pending-op bookkeeping when two identical stage requests overlap:
// the first (detached, still in flight) request ends up recorded as `Skipped`,
// and the second runs to completion as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, so the
    // assertions below can check the full op history rather than only the
    // latest snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: fire-and-forget, left running in the background.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second, identical stage request issued while the first is still in
    // flight; this one is awaited (with a timeout) so the test observes its
    // completion.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 is recorded as skipped; op 2 performs the actual staging.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file should now be staged: added in the index, unmodified on disk.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10391
// Verifies pending-op bookkeeping for `stage_all`/`unstage_all` across
// multiple untracked files, and that their statuses return to `Untracked`
// once everything is unstaged again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, so the
    // full op history (not just the latest snapshot) can be asserted below.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one file individually, then stage everything, then unstage
    // everything; each step is awaited so the ops run sequentially.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // Only two ops are expected for `a.txt` even though it was staged both
    // individually and via `stage_all` — presumably `stage_all` skips entries
    // that are already staged at that point; TODO confirm that dedup behavior.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstaging everything, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10522
10523#[gpui::test]
10524async fn test_repository_subfolder_git_status(
10525 executor: gpui::BackgroundExecutor,
10526 cx: &mut gpui::TestAppContext,
10527) {
10528 init_test(cx);
10529
10530 let fs = FakeFs::new(executor);
10531 fs.insert_tree(
10532 path!("/root"),
10533 json!({
10534 "my-repo": {
10535 ".git": {},
10536 "a.txt": "a",
10537 "sub-folder-1": {
10538 "sub-folder-2": {
10539 "c.txt": "cc",
10540 "d": {
10541 "e.txt": "eee"
10542 }
10543 },
10544 }
10545 },
10546 }),
10547 )
10548 .await;
10549
10550 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
10551 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
10552
10553 fs.set_status_for_repo(
10554 path!("/root/my-repo/.git").as_ref(),
10555 &[(E_TXT, FileStatus::Untracked)],
10556 );
10557
10558 let project = Project::test(
10559 fs.clone(),
10560 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
10561 cx,
10562 )
10563 .await;
10564
10565 project
10566 .update(cx, |project, cx| project.git_scans_complete(cx))
10567 .await;
10568 cx.run_until_parked();
10569
10570 let repository = project.read_with(cx, |project, cx| {
10571 project.repositories(cx).values().next().unwrap().clone()
10572 });
10573
10574 // Ensure that the git status is loaded correctly
10575 repository.read_with(cx, |repository, _cx| {
10576 assert_eq!(
10577 repository.work_directory_abs_path,
10578 Path::new(path!("/root/my-repo")).into()
10579 );
10580
10581 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10582 assert_eq!(
10583 repository
10584 .status_for_path(&repo_path(E_TXT))
10585 .unwrap()
10586 .status,
10587 FileStatus::Untracked
10588 );
10589 });
10590
10591 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
10592 project
10593 .update(cx, |project, cx| project.git_scans_complete(cx))
10594 .await;
10595 cx.run_until_parked();
10596
10597 repository.read_with(cx, |repository, _cx| {
10598 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10599 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
10600 });
10601}
10602
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and real git, so blocking (parking) is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Manufacture a conflicting cherry-pick: write "A" on a side branch,
    // write "b" on main, then cherry-pick the side-branch commit onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check git's own view: the cherry-pick is in progress and a.txt conflicted.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The project's repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick has concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10685
10686#[gpui::test]
10687async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
10688 init_test(cx);
10689 let fs = FakeFs::new(cx.background_executor.clone());
10690 fs.insert_tree(
10691 path!("/root"),
10692 json!({
10693 ".git": {},
10694 ".gitignore": "*.txt\n",
10695 "a.xml": "<a></a>",
10696 "b.txt": "Some text"
10697 }),
10698 )
10699 .await;
10700
10701 fs.set_head_and_index_for_repo(
10702 path!("/root/.git").as_ref(),
10703 &[
10704 (".gitignore", "*.txt\n".into()),
10705 ("a.xml", "<a></a>".into()),
10706 ],
10707 );
10708
10709 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10710
10711 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10712 tree.flush_fs_events(cx).await;
10713 project
10714 .update(cx, |project, cx| project.git_scans_complete(cx))
10715 .await;
10716 cx.executor().run_until_parked();
10717
10718 let repository = project.read_with(cx, |project, cx| {
10719 project.repositories(cx).values().next().unwrap().clone()
10720 });
10721
10722 // One file is unmodified, the other is ignored.
10723 cx.read(|cx| {
10724 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
10725 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
10726 });
10727
10728 // Change the gitignore, and stage the newly non-ignored file.
10729 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
10730 .await
10731 .unwrap();
10732 fs.set_index_for_repo(
10733 Path::new(path!("/root/.git")),
10734 &[
10735 (".gitignore", "*.txt\n".into()),
10736 ("a.xml", "<a></a>".into()),
10737 ("b.txt", "Some text".into()),
10738 ],
10739 );
10740
10741 cx.executor().run_until_parked();
10742 cx.read(|cx| {
10743 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
10744 assert_entry_git_state(
10745 tree.read(cx),
10746 repository.read(cx),
10747 "b.txt",
10748 Some(StatusCode::Added),
10749 false,
10750 );
10751 });
10752}
10753
10754// NOTE:
10755// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
10756// a directory which some program has already open.
10757// This is a limitation of the Windows.
10758// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10759// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10760#[gpui::test]
10761#[cfg_attr(target_os = "windows", ignore)]
10762async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
10763 init_test(cx);
10764 cx.executor().allow_parking();
10765 let root = TempTree::new(json!({
10766 "projects": {
10767 "project1": {
10768 "a": "",
10769 "b": "",
10770 }
10771 },
10772
10773 }));
10774 let root_path = root.path();
10775
10776 let repo = git_init(&root_path.join("projects/project1"));
10777 git_add("a", &repo);
10778 git_commit("init", &repo);
10779 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
10780
10781 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10782
10783 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10784 tree.flush_fs_events(cx).await;
10785 project
10786 .update(cx, |project, cx| project.git_scans_complete(cx))
10787 .await;
10788 cx.executor().run_until_parked();
10789
10790 let repository = project.read_with(cx, |project, cx| {
10791 project.repositories(cx).values().next().unwrap().clone()
10792 });
10793
10794 repository.read_with(cx, |repository, _| {
10795 assert_eq!(
10796 repository.work_directory_abs_path.as_ref(),
10797 root_path.join("projects/project1").as_path()
10798 );
10799 assert_eq!(
10800 repository
10801 .status_for_path(&repo_path("a"))
10802 .map(|entry| entry.status),
10803 Some(StatusCode::Modified.worktree()),
10804 );
10805 assert_eq!(
10806 repository
10807 .status_for_path(&repo_path("b"))
10808 .map(|entry| entry.status),
10809 Some(FileStatus::Untracked),
10810 );
10811 });
10812
10813 std::fs::rename(
10814 root_path.join("projects/project1"),
10815 root_path.join("projects/project2"),
10816 )
10817 .unwrap();
10818 tree.flush_fs_events(cx).await;
10819
10820 repository.read_with(cx, |repository, _| {
10821 assert_eq!(
10822 repository.work_directory_abs_path.as_ref(),
10823 root_path.join("projects/project2").as_path()
10824 );
10825 assert_eq!(
10826 repository.status_for_path(&repo_path("a")).unwrap().status,
10827 StatusCode::Modified.worktree(),
10828 );
10829 assert_eq!(
10830 repository.status_for_path(&repo_path("b")).unwrap().status,
10831 FileStatus::Untracked,
10832 );
10833 });
10834}
10835
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git below, so blocking (parking) is allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // Commit a.txt, c/d/e.txt, and .gitignore; b.txt and f.txt stay untracked.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed:
    // committed files no longer carry a status; f.txt remains untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete some files and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create a new file in a fresh nested directory; it should be untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the containing directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
11060
// Verifies which events fire for changes inside an ignored directory
// (`target/`): entries created/removed under a tracked-but-ignored dir should
// produce worktree entry updates, but no repository status updates.
// Currently `#[ignore]`d; uses the real filesystem and real git.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change so the exact
    // event sequences can be asserted below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test plumbing, not part of the
                        // behavior under test.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Load a file inside the ignored dir so it and its parent dirs appear in
    // the worktree despite being ignored.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate a build tool churning inside the ignored dir: create a nested
    // dir, drop a temp file into it, then remove the whole thing again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11219
11220// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11221// to different timings/ordering of events.
11222#[ignore]
11223#[gpui::test]
11224async fn test_odd_events_for_ignored_dirs(
11225 executor: BackgroundExecutor,
11226 cx: &mut gpui::TestAppContext,
11227) {
11228 init_test(cx);
11229 let fs = FakeFs::new(executor);
11230 fs.insert_tree(
11231 path!("/root"),
11232 json!({
11233 ".git": {},
11234 ".gitignore": "**/target/",
11235 "src": {
11236 "main.rs": "fn main() {}",
11237 },
11238 "target": {
11239 "debug": {
11240 "foo.txt": "foo",
11241 "deps": {}
11242 }
11243 }
11244 }),
11245 )
11246 .await;
11247 fs.set_head_and_index_for_repo(
11248 path!("/root/.git").as_ref(),
11249 &[
11250 (".gitignore", "**/target/".into()),
11251 ("src/main.rs", "fn main() {}".into()),
11252 ],
11253 );
11254
11255 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11256 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11257 let project_events = Arc::new(Mutex::new(Vec::new()));
11258 project.update(cx, |project, cx| {
11259 let repository_updates = repository_updates.clone();
11260 cx.subscribe(project.git_store(), move |_, _, e, _| {
11261 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11262 repository_updates.lock().push(e.clone());
11263 }
11264 })
11265 .detach();
11266 let project_events = project_events.clone();
11267 cx.subscribe_self(move |_, e, _| {
11268 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11269 project_events.lock().extend(
11270 updates
11271 .iter()
11272 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11273 .filter(|(path, _)| path != "fs-event-sentinel"),
11274 );
11275 }
11276 })
11277 .detach();
11278 });
11279
11280 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11281 tree.update(cx, |tree, cx| {
11282 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11283 })
11284 .await
11285 .unwrap();
11286 tree.flush_fs_events(cx).await;
11287 project
11288 .update(cx, |project, cx| project.git_scans_complete(cx))
11289 .await;
11290 cx.run_until_parked();
11291 tree.update(cx, |tree, _| {
11292 assert_eq!(
11293 tree.entries(true, 0)
11294 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11295 .collect::<Vec<_>>(),
11296 vec![
11297 (rel_path(""), false),
11298 (rel_path(".gitignore"), false),
11299 (rel_path("src"), false),
11300 (rel_path("src/main.rs"), false),
11301 (rel_path("target"), true),
11302 (rel_path("target/debug"), true),
11303 (rel_path("target/debug/deps"), true),
11304 (rel_path("target/debug/foo.txt"), true),
11305 ]
11306 );
11307 });
11308
11309 assert_eq!(
11310 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11311 vec![
11312 RepositoryEvent::HeadChanged,
11313 RepositoryEvent::StatusesChanged,
11314 RepositoryEvent::StatusesChanged,
11315 ],
11316 "Initial worktree scan should produce a repo update event"
11317 );
11318 assert_eq!(
11319 project_events.lock().drain(..).collect::<Vec<_>>(),
11320 vec![
11321 ("target".to_string(), PathChange::Loaded),
11322 ("target/debug".to_string(), PathChange::Loaded),
11323 ("target/debug/deps".to_string(), PathChange::Loaded),
11324 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11325 ],
11326 "All non-ignored entries and all opened firs should be getting a project event",
11327 );
11328
11329 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11330 // This may happen multiple times during a single flycheck, but once is enough for testing.
11331 fs.emit_fs_event("/root/target/debug/deps", None);
11332 tree.flush_fs_events(cx).await;
11333 project
11334 .update(cx, |project, cx| project.git_scans_complete(cx))
11335 .await;
11336 cx.executor().run_until_parked();
11337
11338 assert_eq!(
11339 repository_updates
11340 .lock()
11341 .iter()
11342 .cloned()
11343 .collect::<Vec<_>>(),
11344 Vec::new(),
11345 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11346 );
11347 assert_eq!(
11348 project_events.lock().as_slice(),
11349 Vec::new(),
11350 "No further project events should happen, as only ignored dirs received FS events",
11351 );
11352}
11353
// Verifies that repositories are only surfaced for visible worktrees:
// opening an invisible (single-file) worktree that lives inside an outer git
// repository must not add that outer repository to the project's repository
// list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested layout: `dir1` is a repository that contains another
    // repository, `dep1`.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only `dep1` is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // Only the repository rooted at the visible worktree is reported.
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open `b.txt` as an invisible worktree; it sits inside the outer
    // `/root/dir1` repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // The repository list must be unchanged: the invisible worktree's
    // enclosing repository (`/root/dir1`) is not surfaced.
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11415
// Verifies that git status and `is_ignored` state stay correct across
// rescans, for files ignored by an ancestor `.gitignore`, by the repo's own
// `.gitignore`, and for newly created files in each category.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored entries are still scanned and
    // visible to the assertions below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer `.gitignore` is an *ancestor* of the repository root, so the
    // `ancestor-ignored-*` files are ignored from outside the repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force-load the ignored directory's entries so they can be asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is unmodified (no status), ancestor-ignored
    // and repo-ignored files carry the expected `is_ignored` flags.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new file of each kind: a newly staged tracked file, a file
    // ignored by the ancestor `.gitignore`, and a file in the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file shows up as Added; ignored files still have
        // no status but keep their `is_ignored` flags.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11556
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/...`) and submodules (`.git` file pointing at
// `.git/modules/...`) are both discovered as repositories, refresh in
// response to git state changes, and report the correct linked/original
// repository paths.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // A linked worktree's work directory is the worktree itself, while
        // its original repo path points back at the main checkout.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for any in-flight repository work before asserting on status.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // Unlike a linked worktree, a submodule is its own repository: work
        // directory and original repo path coincide.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11728
// Verifies that two worktrees living inside the same git repository share a
// single deduplicated `Repository` entry rather than producing one each.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling subdirectories of the same repository as separate
    // worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // Exactly one repository, rooted at the shared parent.
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11775
// Verifies that when a buffer is saved under a new path, its unstaged and
// uncommitted diffs are rebased onto the *new* file's index/HEAD contents
// (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct sentinel contents so each diff base can be identified.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so saving it under a new path has something to write.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // The uncommitted diff uses HEAD (not the index) as its base.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11889
11890async fn search(
11891 project: &Entity<Project>,
11892 query: SearchQuery,
11893 cx: &mut gpui::TestAppContext,
11894) -> Result<HashMap<String, Vec<Range<usize>>>> {
11895 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11896 let mut results = HashMap::default();
11897 while let Ok(search_result) = search_rx.rx.recv().await {
11898 match search_result {
11899 SearchResult::Buffer { buffer, ranges } => {
11900 results.entry(buffer).or_insert(ranges);
11901 }
11902 SearchResult::LimitReached => {}
11903 }
11904 }
11905 Ok(results
11906 .into_iter()
11907 .map(|(buffer, ranges)| {
11908 buffer.update(cx, |buffer, cx| {
11909 let path = buffer
11910 .file()
11911 .unwrap()
11912 .full_path(cx)
11913 .to_string_lossy()
11914 .to_string();
11915 let ranges = ranges
11916 .into_iter()
11917 .map(|range| range.to_offset(buffer))
11918 .collect::<Vec<_>>();
11919 (path, ranges)
11920 })
11921 })
11922 .collect())
11923}
11924
// Verifies that reloading a buffer with a different encoding is undoable and
// redoable, and that none of the three states marks the buffer dirty (the
// text always matches what a disk read under that encoding produces).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    // Reinterpret the same bytes on disk as UTF-16LE.
    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the original encoding and the original text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo reapplies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11988
// Verifies that `wait_for_initial_scan` only resolves after the worktree
// store's initial scan has completed, and that all repositories (one per
// worktree here) have been created by that point.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Record every `Repository` entity created so we can count how many were
    // constructed during the scan.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
12059
12060pub fn init_test(cx: &mut gpui::TestAppContext) {
12061 zlog::init_test();
12062
12063 cx.update(|cx| {
12064 let settings_store = SettingsStore::test(cx);
12065 cx.set_global(settings_store);
12066 release_channel::init(semver::Version::new(0, 0, 0), cx);
12067 });
12068}
12069
12070fn json_lang() -> Arc<Language> {
12071 Arc::new(Language::new(
12072 LanguageConfig {
12073 name: "JSON".into(),
12074 matcher: LanguageMatcher {
12075 path_suffixes: vec!["json".to_string()],
12076 ..Default::default()
12077 },
12078 ..Default::default()
12079 },
12080 None,
12081 ))
12082}
12083
12084fn js_lang() -> Arc<Language> {
12085 Arc::new(Language::new(
12086 LanguageConfig {
12087 name: "JavaScript".into(),
12088 matcher: LanguageMatcher {
12089 path_suffixes: vec!["js".to_string()],
12090 ..Default::default()
12091 },
12092 ..Default::default()
12093 },
12094 None,
12095 ))
12096}
12097
/// Builds a fake Python language whose toolchain lister reports a `.venv`
/// toolchain for every ancestor directory (relative to the worktree root)
/// that contains one on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister: discovers virtual environments purely by probing the
    // FakeFs for `.venv` directories; `resolve` is intentionally unsupported.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution from an arbitrary path is not needed by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are emitted for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12170
12171fn typescript_lang() -> Arc<Language> {
12172 Arc::new(Language::new(
12173 LanguageConfig {
12174 name: "TypeScript".into(),
12175 matcher: LanguageMatcher {
12176 path_suffixes: vec!["ts".to_string()],
12177 ..Default::default()
12178 },
12179 ..Default::default()
12180 },
12181 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12182 ))
12183}
12184
12185fn tsx_lang() -> Arc<Language> {
12186 Arc::new(Language::new(
12187 LanguageConfig {
12188 name: "tsx".into(),
12189 matcher: LanguageMatcher {
12190 path_suffixes: vec!["tsx".to_string()],
12191 ..Default::default()
12192 },
12193 ..Default::default()
12194 },
12195 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12196 ))
12197}
12198
12199fn get_all_tasks(
12200 project: &Entity<Project>,
12201 task_contexts: Arc<TaskContexts>,
12202 cx: &mut App,
12203) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12204 let new_tasks = project.update(cx, |project, cx| {
12205 project.task_store().update(cx, |task_store, cx| {
12206 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12207 this.used_and_current_resolved_tasks(task_contexts, cx)
12208 })
12209 })
12210 });
12211
12212 cx.background_spawn(async move {
12213 let (mut old, new) = new_tasks.await;
12214 old.extend(new);
12215 old
12216 })
12217}
12218
12219#[track_caller]
12220fn assert_entry_git_state(
12221 tree: &Worktree,
12222 repository: &Repository,
12223 path: &str,
12224 index_status: Option<StatusCode>,
12225 is_ignored: bool,
12226) {
12227 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12228 let entry = tree
12229 .entry_for_path(&rel_path(path))
12230 .unwrap_or_else(|| panic!("entry {path} not found"));
12231 let status = repository
12232 .status_for_path(&repo_path(path))
12233 .map(|entry| entry.status);
12234 let expected = index_status.map(|index_status| {
12235 TrackedStatus {
12236 index_status,
12237 worktree_status: StatusCode::Unmodified,
12238 }
12239 .into()
12240 });
12241 assert_eq!(
12242 status, expected,
12243 "expected {path} to have git status: {expected:?}"
12244 );
12245 assert_eq!(
12246 entry.is_ignored, is_ignored,
12247 "expected {path} to have is_ignored: {is_ignored}"
12248 );
12249}
12250
12251#[track_caller]
12252fn git_init(path: &Path) -> git2::Repository {
12253 let mut init_opts = RepositoryInitOptions::new();
12254 init_opts.initial_head("main");
12255 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12256}
12257
12258#[track_caller]
12259fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12260 let path = path.as_ref();
12261 let mut index = repo.index().expect("Failed to get index");
12262 index.add_path(path).expect("Failed to add file");
12263 index.write().expect("Failed to write index");
12264}
12265
12266#[track_caller]
12267fn git_remove_index(path: &Path, repo: &git2::Repository) {
12268 let mut index = repo.index().expect("Failed to get index");
12269 index.remove_path(path).expect("Failed to add file");
12270 index.write().expect("Failed to write index");
12271}
12272
12273#[track_caller]
12274fn git_commit(msg: &'static str, repo: &git2::Repository) {
12275 use git2::Signature;
12276
12277 let signature = Signature::now("test", "test@zed.dev").unwrap();
12278 let oid = repo.index().unwrap().write_tree().unwrap();
12279 let tree = repo.find_tree(oid).unwrap();
12280 if let Ok(head) = repo.head() {
12281 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12282
12283 let parent_commit = parent_obj.as_commit().unwrap();
12284
12285 repo.commit(
12286 Some("HEAD"),
12287 &signature,
12288 &signature,
12289 msg,
12290 &tree,
12291 &[parent_commit],
12292 )
12293 .expect("Failed to commit with parent");
12294 } else {
12295 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12296 .expect("Failed to commit");
12297 }
12298}
12299
// Cherry-picks `commit` onto the current HEAD. Currently unused; compiled
// out via `#[cfg(any())]` but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12305
12306#[track_caller]
12307fn git_stash(repo: &mut git2::Repository) {
12308 use git2::Signature;
12309
12310 let signature = Signature::now("test", "test@zed.dev").unwrap();
12311 repo.stash_save(&signature, "N/A", None)
12312 .expect("Failed to stash");
12313}
12314
12315#[track_caller]
12316fn git_reset(offset: usize, repo: &git2::Repository) {
12317 let head = repo.head().expect("Couldn't get repo head");
12318 let object = head.peel(git2::ObjectType::Commit).unwrap();
12319 let commit = object.as_commit().unwrap();
12320 let new_head = commit
12321 .parents()
12322 .inspect(|parnet| {
12323 parnet.message();
12324 })
12325 .nth(offset)
12326 .expect("Not enough history");
12327 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12328 .expect("Could not reset");
12329}
12330
// Creates branch `name` pointing at the current HEAD commit. Currently
// unused; compiled out via `#[cfg(any())]` but kept for future tests.
// NOTE(review): the expect message says "Failed to commit" for a branch
// operation — looks copy-pasted; worth fixing if this is re-enabled.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false).expect("Failed to commit");
}
12341
// Points HEAD at `name` (a refname) and checks it out. Currently unused;
// compiled out via `#[cfg(any())]` but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12348
// Collects `repo`'s statuses into a path → status map. Currently unused;
// compiled out via `#[cfg(any())]` but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
12358
// Verifies `Project::find_project_path` for absolute paths: existing files
// in either worktree resolve to the right (worktree_id, relative path) pair,
// nonexistent paths *inside* a worktree still resolve, and paths outside
// every worktree do not.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as independent worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file: the returned path is relative to the worktree root.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12442
12443#[gpui::test]
12444async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
12445 init_test(cx);
12446
12447 let fs = FakeFs::new(cx.executor());
12448 fs.insert_tree(
12449 path!("/root"),
12450 json!({
12451 "a": {
12452 ".git": {},
12453 "src": {
12454 "main.rs": "fn main() {}",
12455 }
12456 },
12457 "b": {
12458 ".git": {},
12459 "src": {
12460 "main.rs": "fn main() {}",
12461 },
12462 "script": {
12463 "run.sh": "#!/bin/bash"
12464 }
12465 }
12466 }),
12467 )
12468 .await;
12469
12470 let project = Project::test(
12471 fs.clone(),
12472 [
12473 path!("/root/a").as_ref(),
12474 path!("/root/b/script").as_ref(),
12475 path!("/root/b").as_ref(),
12476 ],
12477 cx,
12478 )
12479 .await;
12480 let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
12481 scan_complete.await;
12482
12483 let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
12484 assert_eq!(worktrees.len(), 3);
12485
12486 let worktree_id_by_abs_path = worktrees
12487 .into_iter()
12488 .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
12489 .collect::<HashMap<_, _>>();
12490 let worktree_id = worktree_id_by_abs_path
12491 .get(Path::new(path!("/root/b/script")))
12492 .unwrap();
12493
12494 let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
12495 assert_eq!(repos.len(), 2);
12496
12497 project.update(cx, |project, cx| {
12498 project.remove_worktree(*worktree_id, cx);
12499 });
12500 cx.run_until_parked();
12501
12502 let mut repo_paths = project
12503 .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
12504 .values()
12505 .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
12506 .collect::<Vec<_>>();
12507 repo_paths.sort();
12508
12509 pretty_assertions::assert_eq!(
12510 repo_paths,
12511 [
12512 Path::new(path!("/root/a")).into(),
12513 Path::new(path!("/root/b")).into(),
12514 ]
12515 );
12516
12517 let active_repo_path = project
12518 .read_with(cx, |p, cx| {
12519 p.active_repository(cx)
12520 .map(|r| r.read(cx).work_directory_abs_path.clone())
12521 })
12522 .unwrap();
12523 assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));
12524
12525 let worktree_id = worktree_id_by_abs_path
12526 .get(Path::new(path!("/root/a")))
12527 .unwrap();
12528 project.update(cx, |project, cx| {
12529 project.remove_worktree(*worktree_id, cx);
12530 });
12531 cx.run_until_parked();
12532
12533 let active_repo_path = project
12534 .read_with(cx, |p, cx| {
12535 p.active_repository(cx)
12536 .map(|r| r.read(cx).work_directory_abs_path.clone())
12537 })
12538 .unwrap();
12539 assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));
12540
12541 let worktree_id = worktree_id_by_abs_path
12542 .get(Path::new(path!("/root/b")))
12543 .unwrap();
12544 project.update(cx, |project, cx| {
12545 project.remove_worktree(*worktree_id, cx);
12546 });
12547 cx.run_until_parked();
12548
12549 let active_repo_path = project.read_with(cx, |p, cx| {
12550 p.active_repository(cx)
12551 .map(|r| r.read(cx).work_directory_abs_path.clone())
12552 });
12553 assert!(active_repo_path.is_none());
12554}
12555
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Exercises the optimistic hunk states while a file is being staged:
    // HasSecondaryHunk -> SecondaryHunkRemovalPending (optimistic, before the
    // git job finishes) -> NoSecondaryHunk (fully staged), and finally no
    // hunks at all once HEAD matches the working copy.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index contain "two"; the working copy has "TWO", so the
    // uncommitted diff holds exactly one modified hunk on line 1.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // The hunk must transition straight from HasSecondaryHunk to the
        // optimistic SecondaryHunkRemovalPending state; reaching
        // NoSecondaryHunk here would mean the optimistic state was skipped.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // After the ticks above, the hunk must be showing the optimistic
    // "removal pending" state while the staging job is still in flight.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12700
12701#[gpui::test]
12702async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12703 init_test(cx);
12704
12705 // Configure read_only_files setting
12706 cx.update(|cx| {
12707 cx.update_global::<SettingsStore, _>(|store, cx| {
12708 store.update_user_settings(cx, |settings| {
12709 settings.project.worktree.read_only_files = Some(vec![
12710 "**/generated/**".to_string(),
12711 "**/*.gen.rs".to_string(),
12712 ]);
12713 });
12714 });
12715 });
12716
12717 let fs = FakeFs::new(cx.background_executor.clone());
12718 fs.insert_tree(
12719 path!("/root"),
12720 json!({
12721 "src": {
12722 "main.rs": "fn main() {}",
12723 "types.gen.rs": "// Generated file",
12724 },
12725 "generated": {
12726 "schema.rs": "// Auto-generated schema",
12727 }
12728 }),
12729 )
12730 .await;
12731
12732 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12733
12734 // Open a regular file - should be read-write
12735 let regular_buffer = project
12736 .update(cx, |project, cx| {
12737 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12738 })
12739 .await
12740 .unwrap();
12741
12742 regular_buffer.read_with(cx, |buffer, _| {
12743 assert!(!buffer.read_only(), "Regular file should not be read-only");
12744 });
12745
12746 // Open a file matching *.gen.rs pattern - should be read-only
12747 let gen_buffer = project
12748 .update(cx, |project, cx| {
12749 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12750 })
12751 .await
12752 .unwrap();
12753
12754 gen_buffer.read_with(cx, |buffer, _| {
12755 assert!(
12756 buffer.read_only(),
12757 "File matching *.gen.rs pattern should be read-only"
12758 );
12759 });
12760
12761 // Open a file in generated directory - should be read-only
12762 let generated_buffer = project
12763 .update(cx, |project, cx| {
12764 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12765 })
12766 .await
12767 .unwrap();
12768
12769 generated_buffer.read_with(cx, |buffer, _| {
12770 assert!(
12771 buffer.read_only(),
12772 "File in generated directory should be read-only"
12773 );
12774 });
12775}
12776
12777#[gpui::test]
12778async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12779 init_test(cx);
12780
12781 // Explicitly set read_only_files to empty (default behavior)
12782 cx.update(|cx| {
12783 cx.update_global::<SettingsStore, _>(|store, cx| {
12784 store.update_user_settings(cx, |settings| {
12785 settings.project.worktree.read_only_files = Some(vec![]);
12786 });
12787 });
12788 });
12789
12790 let fs = FakeFs::new(cx.background_executor.clone());
12791 fs.insert_tree(
12792 path!("/root"),
12793 json!({
12794 "src": {
12795 "main.rs": "fn main() {}",
12796 },
12797 "generated": {
12798 "schema.rs": "// Auto-generated schema",
12799 }
12800 }),
12801 )
12802 .await;
12803
12804 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12805
12806 // All files should be read-write when read_only_files is empty
12807 let main_buffer = project
12808 .update(cx, |project, cx| {
12809 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12810 })
12811 .await
12812 .unwrap();
12813
12814 main_buffer.read_with(cx, |buffer, _| {
12815 assert!(
12816 !buffer.read_only(),
12817 "Files should not be read-only when read_only_files is empty"
12818 );
12819 });
12820
12821 let generated_buffer = project
12822 .update(cx, |project, cx| {
12823 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12824 })
12825 .await
12826 .unwrap();
12827
12828 generated_buffer.read_with(cx, |buffer, _| {
12829 assert!(
12830 !buffer.read_only(),
12831 "Generated files should not be read-only when read_only_files is empty"
12832 );
12833 });
12834}
12835
12836#[gpui::test]
12837async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12838 init_test(cx);
12839
12840 // Configure to make lock files read-only
12841 cx.update(|cx| {
12842 cx.update_global::<SettingsStore, _>(|store, cx| {
12843 store.update_user_settings(cx, |settings| {
12844 settings.project.worktree.read_only_files = Some(vec![
12845 "**/*.lock".to_string(),
12846 "**/package-lock.json".to_string(),
12847 ]);
12848 });
12849 });
12850 });
12851
12852 let fs = FakeFs::new(cx.background_executor.clone());
12853 fs.insert_tree(
12854 path!("/root"),
12855 json!({
12856 "Cargo.lock": "# Lock file",
12857 "Cargo.toml": "[package]",
12858 "package-lock.json": "{}",
12859 "package.json": "{}",
12860 }),
12861 )
12862 .await;
12863
12864 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12865
12866 // Cargo.lock should be read-only
12867 let cargo_lock = project
12868 .update(cx, |project, cx| {
12869 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12870 })
12871 .await
12872 .unwrap();
12873
12874 cargo_lock.read_with(cx, |buffer, _| {
12875 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12876 });
12877
12878 // Cargo.toml should be read-write
12879 let cargo_toml = project
12880 .update(cx, |project, cx| {
12881 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12882 })
12883 .await
12884 .unwrap();
12885
12886 cargo_toml.read_with(cx, |buffer, _| {
12887 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12888 });
12889
12890 // package-lock.json should be read-only
12891 let package_lock = project
12892 .update(cx, |project, cx| {
12893 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12894 })
12895 .await
12896 .unwrap();
12897
12898 package_lock.read_with(cx, |buffer, _| {
12899 assert!(buffer.read_only(), "package-lock.json should be read-only");
12900 });
12901
12902 // package.json should be read-write
12903 let package_json = project
12904 .update(cx, |project, cx| {
12905 project.open_local_buffer(path!("/root/package.json"), cx)
12906 })
12907 .await
12908 .unwrap();
12909
12910 package_json.read_with(cx, |buffer, _| {
12911 assert!(!buffer.read_only(), "package.json should not be read-only");
12912 });
12913}
12914
12915mod disable_ai_settings_tests {
12916 use gpui::TestAppContext;
12917 use project::*;
12918 use settings::{Settings, SettingsStore};
12919
12920 #[gpui::test]
12921 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12922 cx.update(|cx| {
12923 settings::init(cx);
12924
12925 // Test 1: Default is false (AI enabled)
12926 assert!(
12927 !DisableAiSettings::get_global(cx).disable_ai,
12928 "Default should allow AI"
12929 );
12930 });
12931
12932 let disable_true = serde_json::json!({
12933 "disable_ai": true
12934 })
12935 .to_string();
12936 let disable_false = serde_json::json!({
12937 "disable_ai": false
12938 })
12939 .to_string();
12940
12941 cx.update_global::<SettingsStore, _>(|store, cx| {
12942 store.set_user_settings(&disable_false, cx).unwrap();
12943 store.set_global_settings(&disable_true, cx).unwrap();
12944 });
12945 cx.update(|cx| {
12946 assert!(
12947 DisableAiSettings::get_global(cx).disable_ai,
12948 "Local false cannot override global true"
12949 );
12950 });
12951
12952 cx.update_global::<SettingsStore, _>(|store, cx| {
12953 store.set_global_settings(&disable_false, cx).unwrap();
12954 store.set_user_settings(&disable_true, cx).unwrap();
12955 });
12956
12957 cx.update(|cx| {
12958 assert!(
12959 DisableAiSettings::get_global(cx).disable_ai,
12960 "Local false cannot override global true"
12961 );
12962 });
12963 }
12964
12965 #[gpui::test]
12966 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12967 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12968 use worktree::WorktreeId;
12969
12970 cx.update(|cx| {
12971 settings::init(cx);
12972
12973 // Default should allow AI
12974 assert!(
12975 !DisableAiSettings::get_global(cx).disable_ai,
12976 "Default should allow AI"
12977 );
12978 });
12979
12980 let worktree_id = WorktreeId::from_usize(1);
12981 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12982 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12983 };
12984 let project_path = rel_path("project");
12985 let settings_location = SettingsLocation {
12986 worktree_id,
12987 path: project_path.as_ref(),
12988 };
12989
12990 // Test: Project-level disable_ai=true should disable AI for files in that project
12991 cx.update_global::<SettingsStore, _>(|store, cx| {
12992 store
12993 .set_local_settings(
12994 worktree_id,
12995 LocalSettingsPath::InWorktree(project_path.clone()),
12996 LocalSettingsKind::Settings,
12997 Some(r#"{ "disable_ai": true }"#),
12998 cx,
12999 )
13000 .unwrap();
13001 });
13002
13003 cx.update(|cx| {
13004 let settings = DisableAiSettings::get(Some(settings_location), cx);
13005 assert!(
13006 settings.disable_ai,
13007 "Project-level disable_ai=true should disable AI for files in that project"
13008 );
13009 // Global should now also be true since project-level disable_ai is merged into global
13010 assert!(
13011 DisableAiSettings::get_global(cx).disable_ai,
13012 "Global setting should be affected by project-level disable_ai=true"
13013 );
13014 });
13015
13016 // Test: Setting project-level to false should allow AI for that project
13017 cx.update_global::<SettingsStore, _>(|store, cx| {
13018 store
13019 .set_local_settings(
13020 worktree_id,
13021 LocalSettingsPath::InWorktree(project_path.clone()),
13022 LocalSettingsKind::Settings,
13023 Some(r#"{ "disable_ai": false }"#),
13024 cx,
13025 )
13026 .unwrap();
13027 });
13028
13029 cx.update(|cx| {
13030 let settings = DisableAiSettings::get(Some(settings_location), cx);
13031 assert!(
13032 !settings.disable_ai,
13033 "Project-level disable_ai=false should allow AI"
13034 );
13035 // Global should also be false now
13036 assert!(
13037 !DisableAiSettings::get_global(cx).disable_ai,
13038 "Global setting should be false when project-level is false"
13039 );
13040 });
13041
13042 // Test: User-level true + project-level false = AI disabled (saturation)
13043 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
13044 cx.update_global::<SettingsStore, _>(|store, cx| {
13045 store.set_user_settings(&disable_true, cx).unwrap();
13046 store
13047 .set_local_settings(
13048 worktree_id,
13049 LocalSettingsPath::InWorktree(project_path.clone()),
13050 LocalSettingsKind::Settings,
13051 Some(r#"{ "disable_ai": false }"#),
13052 cx,
13053 )
13054 .unwrap();
13055 });
13056
13057 cx.update(|cx| {
13058 let settings = DisableAiSettings::get(Some(settings_location), cx);
13059 assert!(
13060 settings.disable_ai,
13061 "Project-level false cannot override user-level true (SaturatingBool)"
13062 );
13063 });
13064 }
13065}