1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
45 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
46 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
47 ToolchainMetadata,
48 language_settings::{LanguageSettings, LanguageSettingsContent},
49 markdown_lang, rust_lang, tree_sitter_typescript,
50};
51use lsp::{
52 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
53 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
54 Uri, WillRenameFiles, notification::DidRenameFiles,
55};
56use parking_lot::Mutex;
57use paths::{config_dir, global_gitignore_path, tasks_file};
58use postage::stream::Stream as _;
59use pretty_assertions::{assert_eq, assert_matches};
60use project::{
61 Event, TaskContexts,
62 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
63 search::{SearchQuery, SearchResult},
64 task_store::{TaskSettingsLocation, TaskStore},
65 *,
66};
67use rand::{Rng as _, rngs::StdRng};
68use serde_json::json;
69use settings::SettingsStore;
70#[cfg(not(windows))]
71use std::os;
72use std::{
73 cell::RefCell,
74 env, mem,
75 num::NonZeroU32,
76 ops::Range,
77 path::{Path, PathBuf},
78 rc::Rc,
79 str::FromStr,
80 sync::{Arc, OnceLock, atomic},
81 task::Poll,
82 time::Duration,
83};
84use sum_tree::SumTree;
85use task::{ResolvedTask, ShellKind, TaskContext};
86use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
87use unindent::Unindent as _;
88use util::{
89 TryFutureExt as _, assert_set_eq, maybe, path,
90 paths::{PathMatcher, PathStyle},
91 rel_path::{RelPath, rel_path},
92 test::{TempTree, marked_text_offsets},
93 uri,
94};
95use worktree::WorktreeModelHandle as _;
96
97#[gpui::test]
98async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
99 cx.executor().allow_parking();
100
101 let (tx, mut rx) = futures::channel::mpsc::unbounded();
102 let _thread = std::thread::spawn(move || {
103 #[cfg(not(target_os = "windows"))]
104 std::fs::metadata("/tmp").unwrap();
105 #[cfg(target_os = "windows")]
106 std::fs::metadata("C:/Windows").unwrap();
107 std::thread::sleep(Duration::from_millis(1000));
108 tx.unbounded_send(1).unwrap();
109 });
110 rx.next().await.unwrap();
111}
112
113#[gpui::test]
114async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
115 cx.executor().allow_parking();
116
117 let io_task = smol::unblock(move || {
118 println!("sleeping on thread {:?}", std::thread::current().id());
119 std::thread::sleep(Duration::from_millis(10));
120 1
121 });
122
123 let task = cx.foreground_executor().spawn(async move {
124 io_task.await;
125 });
126
127 task.await;
128}
129
#[gpui::test]
async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // One directory worktree and one single-file worktree under the same root.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir-project": {
                "src": {
                    "main.rs": "fn main() {}"
                }
            },
            "single-file.rs": "fn helper() {}"
        }),
    )
    .await;

    // Open the single-file worktree FIRST, so the assertion below shows the
    // ordering is by worktree kind, not by insertion order.
    let project = Project::test(
        fs,
        [
            Path::new(path!("/root/single-file.rs")),
            Path::new(path!("/root/dir-project")),
        ],
        cx,
    )
    .await;

    let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
    let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();

    // The directory worktree is listed before the single file's parent directory.
    assert_eq!(
        ordered_paths,
        vec![
            PathBuf::from(path!("/root/dir-project")),
            PathBuf::from(path!("/root")),
        ]
    );
}
171
172#[gpui::test]
173async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
174 cx: &mut gpui::TestAppContext,
175) {
176 init_test(cx);
177
178 let fs = FakeFs::new(cx.executor());
179 let project = Project::test(fs, [], cx).await;
180
181 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
182 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
183
184 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
185}
186
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so the executor must be
    // allowed to park while real I/O completes.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink the worktree root itself, and a directory inside it.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape via fennel, grape via finnochio.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory resolves to the same underlying file.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
237
238#[gpui::test]
239async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
240 init_test(cx);
241
242 let dir = TempTree::new(json!({
243 ".editorconfig": r#"
244 root = true
245 [*.rs]
246 indent_style = tab
247 indent_size = 3
248 end_of_line = lf
249 insert_final_newline = true
250 trim_trailing_whitespace = true
251 max_line_length = 120
252 [*.js]
253 tab_width = 10
254 max_line_length = off
255 "#,
256 ".zed": {
257 "settings.json": r#"{
258 "tab_size": 8,
259 "hard_tabs": false,
260 "ensure_final_newline_on_save": false,
261 "remove_trailing_whitespace_on_save": false,
262 "preferred_line_length": 64,
263 "soft_wrap": "editor_width",
264 }"#,
265 },
266 "a.rs": "fn a() {\n A\n}",
267 "b": {
268 ".editorconfig": r#"
269 [*.rs]
270 indent_size = 2
271 max_line_length = off,
272 "#,
273 "b.rs": "fn b() {\n B\n}",
274 },
275 "c.js": "def c\n C\nend",
276 "d": {
277 ".editorconfig": r#"
278 [*.rs]
279 indent_size = 1
280 "#,
281 "d.rs": "fn d() {\n D\n}",
282 },
283 "README.json": "tabs are better\n",
284 }));
285
286 let path = dir.path();
287 let fs = FakeFs::new(cx.executor());
288 fs.insert_tree_from_real_fs(path, path).await;
289 let project = Project::test(fs, [path], cx).await;
290
291 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
292 language_registry.add(js_lang());
293 language_registry.add(json_lang());
294 language_registry.add(rust_lang());
295
296 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
297
298 cx.executor().run_until_parked();
299
300 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
301 let buffer = project
302 .update(cx, |project, cx| {
303 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
304 })
305 .await
306 .unwrap();
307 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
308 };
309
310 let settings_a = settings_for("a.rs", cx).await;
311 let settings_b = settings_for("b/b.rs", cx).await;
312 let settings_c = settings_for("c.js", cx).await;
313 let settings_d = settings_for("d/d.rs", cx).await;
314 let settings_readme = settings_for("README.json", cx).await;
315 // .editorconfig overrides .zed/settings
316 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
317 assert_eq!(settings_a.hard_tabs, true);
318 assert_eq!(settings_a.ensure_final_newline_on_save, true);
319 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
320 assert_eq!(settings_a.preferred_line_length, 120);
321
322 // .editorconfig in b/ overrides .editorconfig in root
323 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
324
325 // .editorconfig in subdirectory overrides .editorconfig in root
326 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
327
328 // "indent_size" is not set, so "tab_width" is used
329 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
330
331 // When max_line_length is "off", default to .zed/settings.json
332 assert_eq!(settings_b.preferred_line_length, 64);
333 assert_eq!(settings_c.preferred_line_length, 64);
334
335 // README.md should not be affected by .editorconfig's globe "*.rs"
336 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
337}
338
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configs live at three levels: grandparent (external, [*]), parent
    // (external, [*.rs]), and the worktree itself (internal, [*.md]).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Opens `path` within the worktree and resolves its effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
393
394#[gpui::test]
395async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
396 init_test(cx);
397
398 let fs = FakeFs::new(cx.executor());
399 fs.insert_tree(
400 path!("/worktree"),
401 json!({
402 ".editorconfig": "[*]\nindent_size = 99\n",
403 "src": {
404 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
405 "file.rs": "fn main() {}",
406 }
407 }),
408 )
409 .await;
410
411 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
412
413 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
414 language_registry.add(rust_lang());
415
416 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
417
418 cx.executor().run_until_parked();
419
420 let buffer = project
421 .update(cx, |project, cx| {
422 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
423 })
424 .await
425 .unwrap();
426 cx.update(|cx| {
427 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
428 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
429 });
430}
431
432#[gpui::test]
433async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
434 init_test(cx);
435
436 let fs = FakeFs::new(cx.executor());
437 fs.insert_tree(
438 path!("/parent"),
439 json!({
440 ".editorconfig": "[*]\nindent_size = 99\n",
441 "worktree": {
442 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
443 "file.rs": "fn main() {}",
444 }
445 }),
446 )
447 .await;
448
449 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
450
451 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
452 language_registry.add(rust_lang());
453
454 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
455
456 cx.executor().run_until_parked();
457
458 let buffer = project
459 .update(cx, |project, cx| {
460 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
461 })
462 .await
463 .unwrap();
464
465 cx.update(|cx| {
466 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
467
468 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
469 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
470 });
471}
472
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree has no .editorconfig of its own; the parent's external
    // config declares `root = true`, which should stop traversal before the
    // grandparent's config is reached.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        // NOTE(review): 4 is also the default tab_size (see the sibling test
        // asserting defaults), so this assertion cannot distinguish "parent's
        // config applied" from "no external config applied at all" — consider
        // using a non-default value like 5 here.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
515
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees share one external parent .editorconfig. Each has
    // its own internal config (differing only in insert_final_newline), which
    // is what makes external config discovery happen for each worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
571
572#[gpui::test]
573async fn test_external_editorconfig_not_loaded_without_internal_config(
574 cx: &mut gpui::TestAppContext,
575) {
576 init_test(cx);
577
578 let fs = FakeFs::new(cx.executor());
579 fs.insert_tree(
580 path!("/parent"),
581 json!({
582 ".editorconfig": "[*]\nindent_size = 99\n",
583 "worktree": {
584 "file.rs": "fn main() {}",
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
591
592 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
593 language_registry.add(rust_lang());
594
595 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
596
597 cx.executor().run_until_parked();
598
599 let buffer = project
600 .update(cx, |project, cx| {
601 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
602 })
603 .await
604 .unwrap();
605
606 cx.update(|cx| {
607 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
608
609 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
610 // because without an internal .editorconfig, external configs are not loaded
611 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
612 });
613}
614
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree's empty internal config ("[*]\n") is what triggers discovery
    // of the external parent config; editing the external file afterwards
    // should be observed and refresh the resolved settings.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
678
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling directories under a shared parent config; the project starts
    // with only one of them as a worktree and adds the second later.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second directory as a new worktree after the project is running.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
745
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (tracked worktree ids, external config paths,
        // watched paths) for assertions.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
801
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two worktrees share a single external parent config; removing one
    // worktree must NOT drop the external config still used by the other.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
894
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    // Project settings declare a custom git hosting provider named "foo".
    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from project settings is registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings file on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // Removing the setting unregisters the provider.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
959
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: settings and tasks at the worktree root (.zed/) and in a nested
    // directory (b/.zed/), so both scoping and task ordering can be asserted.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution context scoped to the (single) active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined at the worktree root's .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // a/ inherits the root settings; b/ is overridden by b/.zed/settings.json.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Initially both worktree-local tasks are listed, the more deeply nested
    // (b/.zed) one first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as scheduled, and register a global task file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the just-run root task is surfaced first (recency),
    // followed by the other worktree task, with global tasks last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1162
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast must name the offending variable and link to the tasks
        // documentation so the user can fix their config.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1220
1221#[gpui::test]
1222async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1223 init_test(cx);
1224 TaskStore::init(None);
1225
1226 let fs = FakeFs::new(cx.executor());
1227 fs.insert_tree(
1228 path!("/dir"),
1229 json!({
1230 ".zed": {
1231 "tasks.json": r#"[{
1232 "label": "test worktree root",
1233 "command": "echo $ZED_WORKTREE_ROOT"
1234 }]"#,
1235 },
1236 "a": {
1237 "a.rs": "fn a() {\n A\n}"
1238 },
1239 }),
1240 )
1241 .await;
1242
1243 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1244 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1245
1246 cx.executor().run_until_parked();
1247 let worktree_id = cx.update(|cx| {
1248 project.update(cx, |project, cx| {
1249 project.worktrees(cx).next().unwrap().read(cx).id()
1250 })
1251 });
1252
1253 let active_non_worktree_item_tasks = cx
1254 .update(|cx| {
1255 get_all_tasks(
1256 &project,
1257 Arc::new(TaskContexts {
1258 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1259 active_worktree_context: None,
1260 other_worktree_contexts: Vec::new(),
1261 lsp_task_sources: HashMap::default(),
1262 latest_selection: None,
1263 }),
1264 cx,
1265 )
1266 })
1267 .await;
1268 assert!(
1269 active_non_worktree_item_tasks.is_empty(),
1270 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1271 );
1272
1273 let active_worktree_tasks = cx
1274 .update(|cx| {
1275 get_all_tasks(
1276 &project,
1277 Arc::new(TaskContexts {
1278 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1279 active_worktree_context: Some((worktree_id, {
1280 let mut worktree_context = TaskContext::default();
1281 worktree_context
1282 .task_variables
1283 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1284 worktree_context
1285 })),
1286 other_worktree_contexts: Vec::new(),
1287 lsp_task_sources: HashMap::default(),
1288 latest_selection: None,
1289 }),
1290 cx,
1291 )
1292 })
1293 .await;
1294 assert_eq!(
1295 active_worktree_tasks
1296 .into_iter()
1297 .map(|(source_kind, task)| {
1298 let resolved = task.resolved;
1299 (source_kind, resolved.command.unwrap())
1300 })
1301 .collect::<Vec<_>>(),
1302 vec![(
1303 TaskSourceKind::Worktree {
1304 id: worktree_id,
1305 directory_in_worktree: rel_path(".zed").into(),
1306 id_base: "local worktree tasks from directory \".zed\"".into(),
1307 },
1308 "echo /dir".to_string(),
1309 )]
1310 );
1311}
1312
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that two subprojects in one worktree share a single language
    // server instance while their configuration agrees, and that activating a
    // distinct toolchain for one subproject spawns a second server instance.

    // Manifest provider that roots Python projects at the nearest ancestor
    // directory directly containing a `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first
        // directory where `pyproject.toml` exists.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling Python subprojects, each with its own rooting point
    // (`pyproject.toml`) and its own virtual environment directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
            {
                "languages": {
                    "Python": {
                        "language_servers": ["ty"]
                    }
                }
            }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activating the discovered toolchain for project-b diverges its
    // configuration from project-a's.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1514
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of the language-server lifecycle: startup on buffer
    // open, capability-based buffer configuration, change/save/rename/close
    // notification routing per language, and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a server diagnostic to the renamed buffer so we can verify it is
    // cleared when the buffer later switches language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements spawn.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1917
1918#[gpui::test]
1919async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1920 init_test(cx);
1921
1922 let settings_json_contents = json!({
1923 "languages": {
1924 "Rust": {
1925 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1926 }
1927 },
1928 "lsp": {
1929 "my_fake_lsp": {
1930 "binary": {
1931 // file exists, so this is treated as a relative path
1932 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1933 }
1934 },
1935 "lsp_on_path": {
1936 "binary": {
1937 // file doesn't exist, so it will fall back on PATH env var
1938 "path": path!("lsp_on_path.exe").to_string(),
1939 }
1940 }
1941 },
1942 });
1943
1944 let fs = FakeFs::new(cx.executor());
1945 fs.insert_tree(
1946 path!("/the-root"),
1947 json!({
1948 ".zed": {
1949 "settings.json": settings_json_contents.to_string(),
1950 },
1951 ".relative_path": {
1952 "to": {
1953 "my_fake_lsp.exe": "",
1954 },
1955 },
1956 "src": {
1957 "main.rs": "",
1958 }
1959 }),
1960 )
1961 .await;
1962
1963 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1964 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1965 language_registry.add(rust_lang());
1966
1967 let mut my_fake_lsp = language_registry.register_fake_lsp(
1968 "Rust",
1969 FakeLspAdapter {
1970 name: "my_fake_lsp",
1971 ..Default::default()
1972 },
1973 );
1974 let mut lsp_on_path = language_registry.register_fake_lsp(
1975 "Rust",
1976 FakeLspAdapter {
1977 name: "lsp_on_path",
1978 ..Default::default()
1979 },
1980 );
1981
1982 cx.run_until_parked();
1983
1984 // Start the language server by opening a buffer with a compatible file extension.
1985 project
1986 .update(cx, |project, cx| {
1987 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1988 })
1989 .await
1990 .unwrap();
1991
1992 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1993 assert_eq!(
1994 lsp_path.to_string_lossy(),
1995 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1996 );
1997
1998 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1999 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2000}
2001
2002#[gpui::test]
2003async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2004 init_test(cx);
2005
2006 let settings_json_contents = json!({
2007 "languages": {
2008 "Rust": {
2009 "language_servers": ["tilde_lsp"]
2010 }
2011 },
2012 "lsp": {
2013 "tilde_lsp": {
2014 "binary": {
2015 "path": "~/.local/bin/rust-analyzer",
2016 }
2017 }
2018 },
2019 });
2020
2021 let fs = FakeFs::new(cx.executor());
2022 fs.insert_tree(
2023 path!("/root"),
2024 json!({
2025 ".zed": {
2026 "settings.json": settings_json_contents.to_string(),
2027 },
2028 "src": {
2029 "main.rs": "fn main() {}",
2030 }
2031 }),
2032 )
2033 .await;
2034
2035 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2036 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2037 language_registry.add(rust_lang());
2038
2039 let mut tilde_lsp = language_registry.register_fake_lsp(
2040 "Rust",
2041 FakeLspAdapter {
2042 name: "tilde_lsp",
2043 ..Default::default()
2044 },
2045 );
2046 cx.run_until_parked();
2047
2048 project
2049 .update(cx, |project, cx| {
2050 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2051 })
2052 .await
2053 .unwrap();
2054
2055 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2056 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2057 assert_eq!(
2058 lsp_path, expected_path,
2059 "Tilde path should expand to home directory"
2060 );
2061}
2062
2063#[gpui::test]
2064async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
2065 cx: &mut gpui::TestAppContext,
2066) {
2067 init_test(cx);
2068
2069 let fs = FakeFs::new(cx.executor());
2070 fs.insert_tree(
2071 path!("/the-root"),
2072 json!({
2073 "Cargo.lock": "",
2074 "src": {
2075 "a.rs": "",
2076 }
2077 }),
2078 )
2079 .await;
2080
2081 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2082 let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
2083 (project.languages().clone(), project.lsp_store())
2084 });
2085 language_registry.add(rust_lang());
2086 let mut fake_servers = language_registry.register_fake_lsp(
2087 "Rust",
2088 FakeLspAdapter {
2089 name: "the-language-server",
2090 ..Default::default()
2091 },
2092 );
2093
2094 cx.executor().run_until_parked();
2095
2096 project
2097 .update(cx, |project, cx| {
2098 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2099 })
2100 .await
2101 .unwrap();
2102
2103 let fake_server = fake_servers.next().await.unwrap();
2104 cx.executor().run_until_parked();
2105
2106 let file_changes = Arc::new(Mutex::new(Vec::new()));
2107 fake_server
2108 .request::<lsp::request::RegisterCapability>(
2109 lsp::RegistrationParams {
2110 registrations: vec![lsp::Registration {
2111 id: Default::default(),
2112 method: "workspace/didChangeWatchedFiles".to_string(),
2113 register_options: serde_json::to_value(
2114 lsp::DidChangeWatchedFilesRegistrationOptions {
2115 watchers: vec![lsp::FileSystemWatcher {
2116 glob_pattern: lsp::GlobPattern::String(
2117 path!("/the-root/Cargo.lock").to_string(),
2118 ),
2119 kind: None,
2120 }],
2121 },
2122 )
2123 .ok(),
2124 }],
2125 },
2126 DEFAULT_LSP_REQUEST_TIMEOUT,
2127 )
2128 .await
2129 .into_response()
2130 .unwrap();
2131 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2132 let file_changes = file_changes.clone();
2133 move |params, _| {
2134 let mut file_changes = file_changes.lock();
2135 file_changes.extend(params.changes);
2136 }
2137 });
2138
2139 cx.executor().run_until_parked();
2140 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2141
2142 fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
2143 cx.executor().run_until_parked();
2144
2145 assert_eq!(
2146 &*file_changes.lock(),
2147 &[lsp::FileEvent {
2148 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2149 typ: lsp::FileChangeType::CHANGED,
2150 }]
2151 );
2152}
2153
2154#[gpui::test]
2155async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2156 init_test(cx);
2157
2158 let fs = FakeFs::new(cx.executor());
2159 fs.insert_tree(
2160 path!("/the-root"),
2161 json!({
2162 ".gitignore": "target\n",
2163 "Cargo.lock": "",
2164 "src": {
2165 "a.rs": "",
2166 "b.rs": "",
2167 },
2168 "target": {
2169 "x": {
2170 "out": {
2171 "x.rs": ""
2172 }
2173 },
2174 "y": {
2175 "out": {
2176 "y.rs": "",
2177 }
2178 },
2179 "z": {
2180 "out": {
2181 "z.rs": ""
2182 }
2183 }
2184 }
2185 }),
2186 )
2187 .await;
2188 fs.insert_tree(
2189 path!("/the-registry"),
2190 json!({
2191 "dep1": {
2192 "src": {
2193 "dep1.rs": "",
2194 }
2195 },
2196 "dep2": {
2197 "src": {
2198 "dep2.rs": "",
2199 }
2200 },
2201 }),
2202 )
2203 .await;
2204 fs.insert_tree(
2205 path!("/the/stdlib"),
2206 json!({
2207 "LICENSE": "",
2208 "src": {
2209 "string.rs": "",
2210 }
2211 }),
2212 )
2213 .await;
2214
2215 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2216 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2217 (project.languages().clone(), project.lsp_store())
2218 });
2219 language_registry.add(rust_lang());
2220 let mut fake_servers = language_registry.register_fake_lsp(
2221 "Rust",
2222 FakeLspAdapter {
2223 name: "the-language-server",
2224 ..Default::default()
2225 },
2226 );
2227
2228 cx.executor().run_until_parked();
2229
2230 // Start the language server by opening a buffer with a compatible file extension.
2231 project
2232 .update(cx, |project, cx| {
2233 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2234 })
2235 .await
2236 .unwrap();
2237
2238 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2239 project.update(cx, |project, cx| {
2240 let worktree = project.worktrees(cx).next().unwrap();
2241 assert_eq!(
2242 worktree
2243 .read(cx)
2244 .snapshot()
2245 .entries(true, 0)
2246 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2247 .collect::<Vec<_>>(),
2248 &[
2249 ("", false),
2250 (".gitignore", false),
2251 ("Cargo.lock", false),
2252 ("src", false),
2253 ("src/a.rs", false),
2254 ("src/b.rs", false),
2255 ("target", true),
2256 ]
2257 );
2258 });
2259
2260 let prev_read_dir_count = fs.read_dir_call_count();
2261
2262 let fake_server = fake_servers.next().await.unwrap();
2263 cx.executor().run_until_parked();
2264 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2265 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2266 id
2267 });
2268
2269 // Simulate jumping to a definition in a dependency outside of the worktree.
2270 let _out_of_worktree_buffer = project
2271 .update(cx, |project, cx| {
2272 project.open_local_buffer_via_lsp(
2273 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2274 server_id,
2275 cx,
2276 )
2277 })
2278 .await
2279 .unwrap();
2280
2281 // Keep track of the FS events reported to the language server.
2282 let file_changes = Arc::new(Mutex::new(Vec::new()));
2283 fake_server
2284 .request::<lsp::request::RegisterCapability>(
2285 lsp::RegistrationParams {
2286 registrations: vec![lsp::Registration {
2287 id: Default::default(),
2288 method: "workspace/didChangeWatchedFiles".to_string(),
2289 register_options: serde_json::to_value(
2290 lsp::DidChangeWatchedFilesRegistrationOptions {
2291 watchers: vec![
2292 lsp::FileSystemWatcher {
2293 glob_pattern: lsp::GlobPattern::String(
2294 path!("/the-root/Cargo.toml").to_string(),
2295 ),
2296 kind: None,
2297 },
2298 lsp::FileSystemWatcher {
2299 glob_pattern: lsp::GlobPattern::String(
2300 path!("/the-root/src/*.{rs,c}").to_string(),
2301 ),
2302 kind: None,
2303 },
2304 lsp::FileSystemWatcher {
2305 glob_pattern: lsp::GlobPattern::String(
2306 path!("/the-root/target/y/**/*.rs").to_string(),
2307 ),
2308 kind: None,
2309 },
2310 lsp::FileSystemWatcher {
2311 glob_pattern: lsp::GlobPattern::String(
2312 path!("/the/stdlib/src/**/*.rs").to_string(),
2313 ),
2314 kind: None,
2315 },
2316 lsp::FileSystemWatcher {
2317 glob_pattern: lsp::GlobPattern::String(
2318 path!("**/Cargo.lock").to_string(),
2319 ),
2320 kind: None,
2321 },
2322 ],
2323 },
2324 )
2325 .ok(),
2326 }],
2327 },
2328 DEFAULT_LSP_REQUEST_TIMEOUT,
2329 )
2330 .await
2331 .into_response()
2332 .unwrap();
2333 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2334 let file_changes = file_changes.clone();
2335 move |params, _| {
2336 let mut file_changes = file_changes.lock();
2337 file_changes.extend(params.changes);
2338 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2339 }
2340 });
2341
2342 cx.executor().run_until_parked();
2343 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2344 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2345
2346 let mut new_watched_paths = fs.watched_paths();
2347 new_watched_paths.retain(|path| {
2348 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2349 });
2350 assert_eq!(
2351 &new_watched_paths,
2352 &[
2353 Path::new(path!("/the-root")),
2354 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2355 Path::new(path!("/the/stdlib/src"))
2356 ]
2357 );
2358
2359 // Now the language server has asked us to watch an ignored directory path,
2360 // so we recursively load it.
2361 project.update(cx, |project, cx| {
2362 let worktree = project.visible_worktrees(cx).next().unwrap();
2363 assert_eq!(
2364 worktree
2365 .read(cx)
2366 .snapshot()
2367 .entries(true, 0)
2368 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2369 .collect::<Vec<_>>(),
2370 &[
2371 ("", false),
2372 (".gitignore", false),
2373 ("Cargo.lock", false),
2374 ("src", false),
2375 ("src/a.rs", false),
2376 ("src/b.rs", false),
2377 ("target", true),
2378 ("target/x", true),
2379 ("target/y", true),
2380 ("target/y/out", true),
2381 ("target/y/out/y.rs", true),
2382 ("target/z", true),
2383 ]
2384 );
2385 });
2386
2387 // Perform some file system mutations, two of which match the watched patterns,
2388 // and one of which does not.
2389 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2390 .await
2391 .unwrap();
2392 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2393 .await
2394 .unwrap();
2395 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2396 .await
2397 .unwrap();
2398 fs.create_file(
2399 path!("/the-root/target/x/out/x2.rs").as_ref(),
2400 Default::default(),
2401 )
2402 .await
2403 .unwrap();
2404 fs.create_file(
2405 path!("/the-root/target/y/out/y2.rs").as_ref(),
2406 Default::default(),
2407 )
2408 .await
2409 .unwrap();
2410 fs.save(
2411 path!("/the-root/Cargo.lock").as_ref(),
2412 &"".into(),
2413 Default::default(),
2414 )
2415 .await
2416 .unwrap();
2417 fs.save(
2418 path!("/the-stdlib/LICENSE").as_ref(),
2419 &"".into(),
2420 Default::default(),
2421 )
2422 .await
2423 .unwrap();
2424 fs.save(
2425 path!("/the/stdlib/src/string.rs").as_ref(),
2426 &"".into(),
2427 Default::default(),
2428 )
2429 .await
2430 .unwrap();
2431
2432 // The language server receives events for the FS mutations that match its watch patterns.
2433 cx.executor().run_until_parked();
2434 assert_eq!(
2435 &*file_changes.lock(),
2436 &[
2437 lsp::FileEvent {
2438 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2439 typ: lsp::FileChangeType::CHANGED,
2440 },
2441 lsp::FileEvent {
2442 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2443 typ: lsp::FileChangeType::DELETED,
2444 },
2445 lsp::FileEvent {
2446 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2447 typ: lsp::FileChangeType::CREATED,
2448 },
2449 lsp::FileEvent {
2450 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2451 typ: lsp::FileChangeType::CREATED,
2452 },
2453 lsp::FileEvent {
2454 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2455 typ: lsp::FileChangeType::CHANGED,
2456 },
2457 ]
2458 );
2459}
2460
2461#[gpui::test]
2462async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2463 init_test(cx);
2464
2465 let fs = FakeFs::new(cx.executor());
2466 fs.insert_tree(
2467 path!("/dir"),
2468 json!({
2469 "a.rs": "let a = 1;",
2470 "b.rs": "let b = 2;"
2471 }),
2472 )
2473 .await;
2474
2475 let project = Project::test(
2476 fs,
2477 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2478 cx,
2479 )
2480 .await;
2481 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2482
2483 let buffer_a = project
2484 .update(cx, |project, cx| {
2485 project.open_local_buffer(path!("/dir/a.rs"), cx)
2486 })
2487 .await
2488 .unwrap();
2489 let buffer_b = project
2490 .update(cx, |project, cx| {
2491 project.open_local_buffer(path!("/dir/b.rs"), cx)
2492 })
2493 .await
2494 .unwrap();
2495
2496 lsp_store.update(cx, |lsp_store, cx| {
2497 lsp_store
2498 .update_diagnostics(
2499 LanguageServerId(0),
2500 lsp::PublishDiagnosticsParams {
2501 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2502 version: None,
2503 diagnostics: vec![lsp::Diagnostic {
2504 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2505 severity: Some(lsp::DiagnosticSeverity::ERROR),
2506 message: "error 1".to_string(),
2507 ..Default::default()
2508 }],
2509 },
2510 None,
2511 DiagnosticSourceKind::Pushed,
2512 &[],
2513 cx,
2514 )
2515 .unwrap();
2516 lsp_store
2517 .update_diagnostics(
2518 LanguageServerId(0),
2519 lsp::PublishDiagnosticsParams {
2520 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2521 version: None,
2522 diagnostics: vec![lsp::Diagnostic {
2523 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2524 severity: Some(DiagnosticSeverity::WARNING),
2525 message: "error 2".to_string(),
2526 ..Default::default()
2527 }],
2528 },
2529 None,
2530 DiagnosticSourceKind::Pushed,
2531 &[],
2532 cx,
2533 )
2534 .unwrap();
2535 });
2536
2537 buffer_a.update(cx, |buffer, _| {
2538 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2539 assert_eq!(
2540 chunks
2541 .iter()
2542 .map(|(s, d)| (s.as_str(), *d))
2543 .collect::<Vec<_>>(),
2544 &[
2545 ("let ", None),
2546 ("a", Some(DiagnosticSeverity::ERROR)),
2547 (" = 1;", None),
2548 ]
2549 );
2550 });
2551 buffer_b.update(cx, |buffer, _| {
2552 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2553 assert_eq!(
2554 chunks
2555 .iter()
2556 .map(|(s, d)| (s.as_str(), *d))
2557 .collect::<Vec<_>>(),
2558 &[
2559 ("let ", None),
2560 ("b", Some(DiagnosticSeverity::WARNING)),
2561 (" = 2;", None),
2562 ]
2563 );
2564 });
2565}
2566
2567#[gpui::test]
2568async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2569 init_test(cx);
2570
2571 let fs = FakeFs::new(cx.executor());
2572 fs.insert_tree(
2573 path!("/root"),
2574 json!({
2575 "dir": {
2576 ".git": {
2577 "HEAD": "ref: refs/heads/main",
2578 },
2579 ".gitignore": "b.rs",
2580 "a.rs": "let a = 1;",
2581 "b.rs": "let b = 2;",
2582 },
2583 "other.rs": "let b = c;"
2584 }),
2585 )
2586 .await;
2587
2588 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2589 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2590 let (worktree, _) = project
2591 .update(cx, |project, cx| {
2592 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2593 })
2594 .await
2595 .unwrap();
2596 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2597
2598 let (worktree, _) = project
2599 .update(cx, |project, cx| {
2600 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2601 })
2602 .await
2603 .unwrap();
2604 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2605
2606 let server_id = LanguageServerId(0);
2607 lsp_store.update(cx, |lsp_store, cx| {
2608 lsp_store
2609 .update_diagnostics(
2610 server_id,
2611 lsp::PublishDiagnosticsParams {
2612 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2613 version: None,
2614 diagnostics: vec![lsp::Diagnostic {
2615 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2616 severity: Some(lsp::DiagnosticSeverity::ERROR),
2617 message: "unused variable 'b'".to_string(),
2618 ..Default::default()
2619 }],
2620 },
2621 None,
2622 DiagnosticSourceKind::Pushed,
2623 &[],
2624 cx,
2625 )
2626 .unwrap();
2627 lsp_store
2628 .update_diagnostics(
2629 server_id,
2630 lsp::PublishDiagnosticsParams {
2631 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2632 version: None,
2633 diagnostics: vec![lsp::Diagnostic {
2634 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2635 severity: Some(lsp::DiagnosticSeverity::ERROR),
2636 message: "unknown variable 'c'".to_string(),
2637 ..Default::default()
2638 }],
2639 },
2640 None,
2641 DiagnosticSourceKind::Pushed,
2642 &[],
2643 cx,
2644 )
2645 .unwrap();
2646 });
2647
2648 let main_ignored_buffer = project
2649 .update(cx, |project, cx| {
2650 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2651 })
2652 .await
2653 .unwrap();
2654 main_ignored_buffer.update(cx, |buffer, _| {
2655 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2656 assert_eq!(
2657 chunks
2658 .iter()
2659 .map(|(s, d)| (s.as_str(), *d))
2660 .collect::<Vec<_>>(),
2661 &[
2662 ("let ", None),
2663 ("b", Some(DiagnosticSeverity::ERROR)),
2664 (" = 2;", None),
2665 ],
2666 "Gigitnored buffers should still get in-buffer diagnostics",
2667 );
2668 });
2669 let other_buffer = project
2670 .update(cx, |project, cx| {
2671 project.open_buffer((other_worktree_id, rel_path("")), cx)
2672 })
2673 .await
2674 .unwrap();
2675 other_buffer.update(cx, |buffer, _| {
2676 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2677 assert_eq!(
2678 chunks
2679 .iter()
2680 .map(|(s, d)| (s.as_str(), *d))
2681 .collect::<Vec<_>>(),
2682 &[
2683 ("let b = ", None),
2684 ("c", Some(DiagnosticSeverity::ERROR)),
2685 (";", None),
2686 ],
2687 "Buffers from hidden projects should still get in-buffer diagnostics"
2688 );
2689 });
2690
2691 project.update(cx, |project, cx| {
2692 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2693 assert_eq!(
2694 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2695 vec![(
2696 ProjectPath {
2697 worktree_id: main_worktree_id,
2698 path: rel_path("b.rs").into(),
2699 },
2700 server_id,
2701 DiagnosticSummary {
2702 error_count: 1,
2703 warning_count: 0,
2704 }
2705 )]
2706 );
2707 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2708 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2709 });
2710}
2711
// Verifies the project-level event stream for "disk-based" diagnostics
// (e.g. a cargo-check style pass): progress using the adapter's registered
// token produces DiskBasedDiagnosticsStarted/Finished events, a publish
// produces DiagnosticsUpdated, and re-publishing identical empty
// diagnostics does not produce a second event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress whose token starts with the adapter's disk-based token
    // ("the-progress-token/0") is reported as the start of a disk-based pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file (even one that isn't open yet)
    // emits DiagnosticsUpdated with that file's project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the previously published
    // diagnostic, anchored to the reported range.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second, identical (still empty) publish must not wake the event
    // stream again.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2847
// Verifies that restarting a language server in the middle of a disk-based
// diagnostics pass does not leave the project stuck in the "diagnostics
// running" state: the replacement server (id 1) takes over, and ending its
// own pass clears the running set even though the old server (id 0) never
// finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Captured before `buffer` is moved into the restart call below.
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server is removed and the replacement is added with the next id.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server appears in the running set.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2949
2950#[gpui::test]
2951async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2952 init_test(cx);
2953
2954 let fs = FakeFs::new(cx.executor());
2955 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2956
2957 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2958
2959 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2960 language_registry.add(rust_lang());
2961 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2962
2963 let (buffer, _) = project
2964 .update(cx, |project, cx| {
2965 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2966 })
2967 .await
2968 .unwrap();
2969
2970 // Publish diagnostics
2971 let fake_server = fake_servers.next().await.unwrap();
2972 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2973 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2974 version: None,
2975 diagnostics: vec![lsp::Diagnostic {
2976 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2977 severity: Some(lsp::DiagnosticSeverity::ERROR),
2978 message: "the message".to_string(),
2979 ..Default::default()
2980 }],
2981 });
2982
2983 cx.executor().run_until_parked();
2984 buffer.update(cx, |buffer, _| {
2985 assert_eq!(
2986 buffer
2987 .snapshot()
2988 .diagnostics_in_range::<_, usize>(0..1, false)
2989 .map(|entry| entry.diagnostic.message.clone())
2990 .collect::<Vec<_>>(),
2991 ["the message".to_string()]
2992 );
2993 });
2994 project.update(cx, |project, cx| {
2995 assert_eq!(
2996 project.diagnostic_summary(false, cx),
2997 DiagnosticSummary {
2998 error_count: 1,
2999 warning_count: 0,
3000 }
3001 );
3002 });
3003
3004 project.update(cx, |project, cx| {
3005 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3006 });
3007
3008 // The diagnostics are cleared.
3009 cx.executor().run_until_parked();
3010 buffer.update(cx, |buffer, _| {
3011 assert_eq!(
3012 buffer
3013 .snapshot()
3014 .diagnostics_in_range::<_, usize>(0..1, false)
3015 .map(|entry| entry.diagnostic.message.clone())
3016 .collect::<Vec<_>>(),
3017 Vec::<String>::new(),
3018 );
3019 });
3020 project.update(cx, |project, cx| {
3021 assert_eq!(
3022 project.diagnostic_summary(false, cx),
3023 DiagnosticSummary {
3024 error_count: 0,
3025 warning_count: 0,
3026 }
3027 );
3028 });
3029}
3030
3031#[gpui::test]
3032async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3033 init_test(cx);
3034
3035 let fs = FakeFs::new(cx.executor());
3036 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3037
3038 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3039 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3040
3041 language_registry.add(rust_lang());
3042 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3043
3044 let (buffer, _handle) = project
3045 .update(cx, |project, cx| {
3046 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3047 })
3048 .await
3049 .unwrap();
3050
3051 // Before restarting the server, report diagnostics with an unknown buffer version.
3052 let fake_server = fake_servers.next().await.unwrap();
3053 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3054 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3055 version: Some(10000),
3056 diagnostics: Vec::new(),
3057 });
3058 cx.executor().run_until_parked();
3059 project.update(cx, |project, cx| {
3060 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3061 });
3062
3063 let mut fake_server = fake_servers.next().await.unwrap();
3064 let notification = fake_server
3065 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3066 .await
3067 .text_document;
3068 assert_eq!(notification.version, 0);
3069}
3070
// Verifies cancelling language-server work for a buffer: only progress that
// was registered as cancellable receives a `window/workDoneProgress/cancel`
// notification; the non-cancellable token is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token: explicitly NOT cancellable — must not be cancelled below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token: cancellable — this is the one the cancel request should hit.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3142
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts exactly the affected server: disabling Rust exits only
// the Rust server; re-enabling Rust while disabling JavaScript restarts the
// former (which re-opens its buffer) and exits the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance comes up and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3260
3261#[gpui::test(iterations = 3)]
3262async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3263 init_test(cx);
3264
3265 let text = "
3266 fn a() { A }
3267 fn b() { BB }
3268 fn c() { CCC }
3269 "
3270 .unindent();
3271
3272 let fs = FakeFs::new(cx.executor());
3273 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3274
3275 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3276 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3277
3278 language_registry.add(rust_lang());
3279 let mut fake_servers = language_registry.register_fake_lsp(
3280 "Rust",
3281 FakeLspAdapter {
3282 disk_based_diagnostics_sources: vec!["disk".into()],
3283 ..Default::default()
3284 },
3285 );
3286
3287 let buffer = project
3288 .update(cx, |project, cx| {
3289 project.open_local_buffer(path!("/dir/a.rs"), cx)
3290 })
3291 .await
3292 .unwrap();
3293
3294 let _handle = project.update(cx, |project, cx| {
3295 project.register_buffer_with_language_servers(&buffer, cx)
3296 });
3297
3298 let mut fake_server = fake_servers.next().await.unwrap();
3299 let open_notification = fake_server
3300 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3301 .await;
3302
3303 // Edit the buffer, moving the content down
3304 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3305 let change_notification_1 = fake_server
3306 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3307 .await;
3308 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3309
3310 // Report some diagnostics for the initial version of the buffer
3311 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3312 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3313 version: Some(open_notification.text_document.version),
3314 diagnostics: vec![
3315 lsp::Diagnostic {
3316 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3317 severity: Some(DiagnosticSeverity::ERROR),
3318 message: "undefined variable 'A'".to_string(),
3319 source: Some("disk".to_string()),
3320 ..Default::default()
3321 },
3322 lsp::Diagnostic {
3323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3324 severity: Some(DiagnosticSeverity::ERROR),
3325 message: "undefined variable 'BB'".to_string(),
3326 source: Some("disk".to_string()),
3327 ..Default::default()
3328 },
3329 lsp::Diagnostic {
3330 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3331 severity: Some(DiagnosticSeverity::ERROR),
3332 source: Some("disk".to_string()),
3333 message: "undefined variable 'CCC'".to_string(),
3334 ..Default::default()
3335 },
3336 ],
3337 });
3338
3339 // The diagnostics have moved down since they were created.
3340 cx.executor().run_until_parked();
3341 buffer.update(cx, |buffer, _| {
3342 assert_eq!(
3343 buffer
3344 .snapshot()
3345 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3346 .collect::<Vec<_>>(),
3347 &[
3348 DiagnosticEntry {
3349 range: Point::new(3, 9)..Point::new(3, 11),
3350 diagnostic: Diagnostic {
3351 source: Some("disk".into()),
3352 severity: DiagnosticSeverity::ERROR,
3353 message: "undefined variable 'BB'".to_string(),
3354 is_disk_based: true,
3355 group_id: 1,
3356 is_primary: true,
3357 source_kind: DiagnosticSourceKind::Pushed,
3358 ..Diagnostic::default()
3359 },
3360 },
3361 DiagnosticEntry {
3362 range: Point::new(4, 9)..Point::new(4, 12),
3363 diagnostic: Diagnostic {
3364 source: Some("disk".into()),
3365 severity: DiagnosticSeverity::ERROR,
3366 message: "undefined variable 'CCC'".to_string(),
3367 is_disk_based: true,
3368 group_id: 2,
3369 is_primary: true,
3370 source_kind: DiagnosticSourceKind::Pushed,
3371 ..Diagnostic::default()
3372 }
3373 }
3374 ]
3375 );
3376 assert_eq!(
3377 chunks_with_diagnostics(buffer, 0..buffer.len()),
3378 [
3379 ("\n\nfn a() { ".to_string(), None),
3380 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3381 (" }\nfn b() { ".to_string(), None),
3382 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3383 (" }\nfn c() { ".to_string(), None),
3384 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3385 (" }\n".to_string(), None),
3386 ]
3387 );
3388 assert_eq!(
3389 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3390 [
3391 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3392 (" }\nfn c() { ".to_string(), None),
3393 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3394 ]
3395 );
3396 });
3397
3398 // Ensure overlapping diagnostics are highlighted correctly.
3399 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3400 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3401 version: Some(open_notification.text_document.version),
3402 diagnostics: vec![
3403 lsp::Diagnostic {
3404 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3405 severity: Some(DiagnosticSeverity::ERROR),
3406 message: "undefined variable 'A'".to_string(),
3407 source: Some("disk".to_string()),
3408 ..Default::default()
3409 },
3410 lsp::Diagnostic {
3411 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3412 severity: Some(DiagnosticSeverity::WARNING),
3413 message: "unreachable statement".to_string(),
3414 source: Some("disk".to_string()),
3415 ..Default::default()
3416 },
3417 ],
3418 });
3419
3420 cx.executor().run_until_parked();
3421 buffer.update(cx, |buffer, _| {
3422 assert_eq!(
3423 buffer
3424 .snapshot()
3425 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3426 .collect::<Vec<_>>(),
3427 &[
3428 DiagnosticEntry {
3429 range: Point::new(2, 9)..Point::new(2, 12),
3430 diagnostic: Diagnostic {
3431 source: Some("disk".into()),
3432 severity: DiagnosticSeverity::WARNING,
3433 message: "unreachable statement".to_string(),
3434 is_disk_based: true,
3435 group_id: 4,
3436 is_primary: true,
3437 source_kind: DiagnosticSourceKind::Pushed,
3438 ..Diagnostic::default()
3439 }
3440 },
3441 DiagnosticEntry {
3442 range: Point::new(2, 9)..Point::new(2, 10),
3443 diagnostic: Diagnostic {
3444 source: Some("disk".into()),
3445 severity: DiagnosticSeverity::ERROR,
3446 message: "undefined variable 'A'".to_string(),
3447 is_disk_based: true,
3448 group_id: 3,
3449 is_primary: true,
3450 source_kind: DiagnosticSourceKind::Pushed,
3451 ..Diagnostic::default()
3452 },
3453 }
3454 ]
3455 );
3456 assert_eq!(
3457 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3458 [
3459 ("fn a() { ".to_string(), None),
3460 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3461 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3462 ("\n".to_string(), None),
3463 ]
3464 );
3465 assert_eq!(
3466 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3467 [
3468 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3469 ("\n".to_string(), None),
3470 ]
3471 );
3472 });
3473
3474 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3475 // changes since the last save.
3476 buffer.update(cx, |buffer, cx| {
3477 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3478 buffer.edit(
3479 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3480 None,
3481 cx,
3482 );
3483 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3484 });
3485 let change_notification_2 = fake_server
3486 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3487 .await;
3488 assert!(
3489 change_notification_2.text_document.version > change_notification_1.text_document.version
3490 );
3491
3492 // Handle out-of-order diagnostics
3493 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3494 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3495 version: Some(change_notification_2.text_document.version),
3496 diagnostics: vec![
3497 lsp::Diagnostic {
3498 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3499 severity: Some(DiagnosticSeverity::ERROR),
3500 message: "undefined variable 'BB'".to_string(),
3501 source: Some("disk".to_string()),
3502 ..Default::default()
3503 },
3504 lsp::Diagnostic {
3505 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3506 severity: Some(DiagnosticSeverity::WARNING),
3507 message: "undefined variable 'A'".to_string(),
3508 source: Some("disk".to_string()),
3509 ..Default::default()
3510 },
3511 ],
3512 });
3513
3514 cx.executor().run_until_parked();
3515 buffer.update(cx, |buffer, _| {
3516 assert_eq!(
3517 buffer
3518 .snapshot()
3519 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3520 .collect::<Vec<_>>(),
3521 &[
3522 DiagnosticEntry {
3523 range: Point::new(2, 21)..Point::new(2, 22),
3524 diagnostic: Diagnostic {
3525 source: Some("disk".into()),
3526 severity: DiagnosticSeverity::WARNING,
3527 message: "undefined variable 'A'".to_string(),
3528 is_disk_based: true,
3529 group_id: 6,
3530 is_primary: true,
3531 source_kind: DiagnosticSourceKind::Pushed,
3532 ..Diagnostic::default()
3533 }
3534 },
3535 DiagnosticEntry {
3536 range: Point::new(3, 9)..Point::new(3, 14),
3537 diagnostic: Diagnostic {
3538 source: Some("disk".into()),
3539 severity: DiagnosticSeverity::ERROR,
3540 message: "undefined variable 'BB'".to_string(),
3541 is_disk_based: true,
3542 group_id: 5,
3543 is_primary: true,
3544 source_kind: DiagnosticSourceKind::Pushed,
3545 ..Diagnostic::default()
3546 },
3547 }
3548 ]
3549 );
3550 });
3551}
3552
3553#[gpui::test]
3554async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3555 init_test(cx);
3556
3557 let text = concat!(
3558 "let one = ;\n", //
3559 "let two = \n",
3560 "let three = 3;\n",
3561 );
3562
3563 let fs = FakeFs::new(cx.executor());
3564 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3565
3566 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3567 let buffer = project
3568 .update(cx, |project, cx| {
3569 project.open_local_buffer(path!("/dir/a.rs"), cx)
3570 })
3571 .await
3572 .unwrap();
3573
3574 project.update(cx, |project, cx| {
3575 project.lsp_store().update(cx, |lsp_store, cx| {
3576 lsp_store
3577 .update_diagnostic_entries(
3578 LanguageServerId(0),
3579 PathBuf::from(path!("/dir/a.rs")),
3580 None,
3581 None,
3582 vec![
3583 DiagnosticEntry {
3584 range: Unclipped(PointUtf16::new(0, 10))
3585 ..Unclipped(PointUtf16::new(0, 10)),
3586 diagnostic: Diagnostic {
3587 severity: DiagnosticSeverity::ERROR,
3588 message: "syntax error 1".to_string(),
3589 source_kind: DiagnosticSourceKind::Pushed,
3590 ..Diagnostic::default()
3591 },
3592 },
3593 DiagnosticEntry {
3594 range: Unclipped(PointUtf16::new(1, 10))
3595 ..Unclipped(PointUtf16::new(1, 10)),
3596 diagnostic: Diagnostic {
3597 severity: DiagnosticSeverity::ERROR,
3598 message: "syntax error 2".to_string(),
3599 source_kind: DiagnosticSourceKind::Pushed,
3600 ..Diagnostic::default()
3601 },
3602 },
3603 ],
3604 cx,
3605 )
3606 .unwrap();
3607 })
3608 });
3609
3610 // An empty range is extended forward to include the following character.
3611 // At the end of a line, an empty range is extended backward to include
3612 // the preceding character.
3613 buffer.update(cx, |buffer, _| {
3614 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3615 assert_eq!(
3616 chunks
3617 .iter()
3618 .map(|(s, d)| (s.as_str(), *d))
3619 .collect::<Vec<_>>(),
3620 &[
3621 ("let one = ", None),
3622 (";", Some(DiagnosticSeverity::ERROR)),
3623 ("\nlet two =", None),
3624 (" ", Some(DiagnosticSeverity::ERROR)),
3625 ("\nlet three = 3;\n", None)
3626 ]
3627 );
3628 });
3629}
3630
3631#[gpui::test]
3632async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3633 init_test(cx);
3634
3635 let fs = FakeFs::new(cx.executor());
3636 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3637 .await;
3638
3639 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3640 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3641
3642 lsp_store.update(cx, |lsp_store, cx| {
3643 lsp_store
3644 .update_diagnostic_entries(
3645 LanguageServerId(0),
3646 Path::new(path!("/dir/a.rs")).to_owned(),
3647 None,
3648 None,
3649 vec![DiagnosticEntry {
3650 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3651 diagnostic: Diagnostic {
3652 severity: DiagnosticSeverity::ERROR,
3653 is_primary: true,
3654 message: "syntax error a1".to_string(),
3655 source_kind: DiagnosticSourceKind::Pushed,
3656 ..Diagnostic::default()
3657 },
3658 }],
3659 cx,
3660 )
3661 .unwrap();
3662 lsp_store
3663 .update_diagnostic_entries(
3664 LanguageServerId(1),
3665 Path::new(path!("/dir/a.rs")).to_owned(),
3666 None,
3667 None,
3668 vec![DiagnosticEntry {
3669 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3670 diagnostic: Diagnostic {
3671 severity: DiagnosticSeverity::ERROR,
3672 is_primary: true,
3673 message: "syntax error b1".to_string(),
3674 source_kind: DiagnosticSourceKind::Pushed,
3675 ..Diagnostic::default()
3676 },
3677 }],
3678 cx,
3679 )
3680 .unwrap();
3681
3682 assert_eq!(
3683 lsp_store.diagnostic_summary(false, cx),
3684 DiagnosticSummary {
3685 error_count: 2,
3686 warning_count: 0,
3687 }
3688 );
3689 });
3690}
3691
3692#[gpui::test]
3693async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3694 cx: &mut gpui::TestAppContext,
3695) {
3696 init_test(cx);
3697
3698 let fs = FakeFs::new(cx.executor());
3699 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3700 .await;
3701
3702 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3703 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3704
3705 lsp_store.update(cx, |lsp_store, cx| {
3706 lsp_store
3707 .update_diagnostic_entries(
3708 LanguageServerId(0),
3709 Path::new(path!("/dir/a.rs")).to_owned(),
3710 None,
3711 None,
3712 vec![DiagnosticEntry {
3713 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3714 diagnostic: Diagnostic {
3715 severity: DiagnosticSeverity::ERROR,
3716 is_primary: true,
3717 message: "error in a".to_string(),
3718 source_kind: DiagnosticSourceKind::Pushed,
3719 ..Diagnostic::default()
3720 },
3721 }],
3722 cx,
3723 )
3724 .unwrap();
3725 lsp_store
3726 .update_diagnostic_entries(
3727 LanguageServerId(0),
3728 Path::new(path!("/dir/b.rs")).to_owned(),
3729 None,
3730 None,
3731 vec![DiagnosticEntry {
3732 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3733 diagnostic: Diagnostic {
3734 severity: DiagnosticSeverity::WARNING,
3735 is_primary: true,
3736 message: "warning in b".to_string(),
3737 source_kind: DiagnosticSourceKind::Pushed,
3738 ..Diagnostic::default()
3739 },
3740 }],
3741 cx,
3742 )
3743 .unwrap();
3744
3745 assert_eq!(
3746 lsp_store.diagnostic_summary(false, cx),
3747 DiagnosticSummary {
3748 error_count: 1,
3749 warning_count: 1,
3750 }
3751 );
3752 });
3753
3754 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3755 .await
3756 .unwrap();
3757 cx.executor().run_until_parked();
3758
3759 lsp_store.update(cx, |lsp_store, cx| {
3760 assert_eq!(
3761 lsp_store.diagnostic_summary(false, cx),
3762 DiagnosticSummary {
3763 error_count: 0,
3764 warning_count: 1,
3765 },
3766 );
3767 });
3768}
3769
// Verifies that restarting a language server clears the diagnostics it had
// previously published, and that a DiagnosticsUpdated event is emitted when
// the old server is stopped.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error so the project-wide summary is non-empty before the restart.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe to project events before triggering the restart so that no
    // event emitted during the restart is missed.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain every event already queued (now_or_never makes this non-blocking),
    // looking for at least one DiagnosticsUpdated caused by the server stop.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // The restarted server has published nothing, so the summary is empty again.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3843
// Verifies that reloading a buffer (after the file changes on disk) triggers a
// fresh document-diagnostics pull from a server that advertises pull support.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many textDocument/diagnostic requests the fake server receives.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    // Advertise pull-diagnostics support so the client issues
    // textDocument/diagnostic requests; answer each with an empty full report
    // while bumping the counter.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the pull counter before the reload so the assertion below is
    // insensitive to how many pulls happened during setup.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3951
// Verifies that edits_from_lsp correctly translates LSP edits that were
// computed against an OLDER buffer version: the edits are interpreted at the
// version the server saw (via the version number it reported), then mapped
// forward through the buffer's subsequent local edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be anchored to this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Positions in these edits refer to the ORIGINAL (pre-edit) buffer text,
    // because they are tagged with `lsp_document_version` below.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits should preserve the local comments that
    // were inserted after the server computed its edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4106
// Verifies that edits_from_lsp collapses a server's whole-file style diff into
// a minimal set of edits, so that unchanged regions are not touched.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above reduce to just two minimal edits:
        // rewrite the first import path and delete the second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4217
4218#[gpui::test]
4219async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4220 cx: &mut gpui::TestAppContext,
4221) {
4222 init_test(cx);
4223
4224 let text = "Path()";
4225
4226 let fs = FakeFs::new(cx.executor());
4227 fs.insert_tree(
4228 path!("/dir"),
4229 json!({
4230 "a.rs": text
4231 }),
4232 )
4233 .await;
4234
4235 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4236 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4237 let buffer = project
4238 .update(cx, |project, cx| {
4239 project.open_local_buffer(path!("/dir/a.rs"), cx)
4240 })
4241 .await
4242 .unwrap();
4243
4244 // Simulate the language server sending us a pair of edits at the same location,
4245 // with an insertion following a replacement (which violates the LSP spec).
4246 let edits = lsp_store
4247 .update(cx, |lsp_store, cx| {
4248 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4249 &buffer,
4250 [
4251 lsp::TextEdit {
4252 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4253 new_text: "Path".into(),
4254 },
4255 lsp::TextEdit {
4256 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4257 new_text: "from path import Path\n\n\n".into(),
4258 },
4259 ],
4260 LanguageServerId(0),
4261 None,
4262 cx,
4263 )
4264 })
4265 .await
4266 .unwrap();
4267
4268 buffer.update(cx, |buffer, cx| {
4269 buffer.edit(edits, None, cx);
4270 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4271 });
4272}
4273
// Verifies that edits_from_lsp is robust to malformed server edits: inverted
// ranges (end before start) and end positions past the end of the file are
// normalized/clipped rather than rejected, and the result still reduces to a
// minimal diff.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8, end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal diff as
        // in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4380
4381fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4382 buffer: &Buffer,
4383 range: Range<T>,
4384) -> Vec<(String, Option<DiagnosticSeverity>)> {
4385 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4386 for chunk in buffer.snapshot().chunks(
4387 range,
4388 LanguageAwareStyling {
4389 tree_sitter: true,
4390 diagnostics: true,
4391 },
4392 ) {
4393 if chunks
4394 .last()
4395 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4396 {
4397 chunks.last_mut().unwrap().0.push_str(chunk.text);
4398 } else {
4399 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4400 }
4401 }
4402 chunks
4403}
4404
// Verifies go-to-definition into a file outside the project: the target file
// is loaded via a temporary invisible worktree, which is released once the
// definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location in a.rs,
    // outside the project's visible worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held by an invisible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path and whether it is visible in the UI.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4505
// Verifies that when an LSP completion item provides an explicit `text_edit`,
// its range and new text are used verbatim, taking precedence over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake language server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the completion request first; the handler installed below is
    // awaited once (`.next().await`) to service exactly this request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the trailing "fqn" (last 3 characters).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion carries the text_edit's text and range,
    // not the label or insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4589
// Verifies completion resolution against `CompletionList.itemDefaults.editRange`:
// when an item lacks its own `text_edit`, the default edit range is combined
// with `text_edit_text` (if present) or with the item's `label` as a fallback.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request first; the handler installed below services it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` + the default range are used, not the label.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range but no edit text, the label is inserted;
        // `insert_text` is ignored in this case, per the assertion below.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4727
// Verifies the fallback behavior when neither `text_edit` nor a default
// `editRange` is provided: the replace range is derived from the word at the
// cursor, and the inserted text comes from `insert_text` (Test 1) or from
// the `label` (Test 2).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One empty TypeScript file on a fake filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler installed below services it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label; the range covers the "fqn" word
    // preceding the cursor (last 3 characters, per the assertion).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With nothing else to go on, the label is inserted, replacing the
    // partial word "cmp" before the cursor (text.len() - 4 .. text.len() - 1).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4834
4835#[gpui::test]
4836async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4837 init_test(cx);
4838
4839 let fs = FakeFs::new(cx.executor());
4840 fs.insert_tree(
4841 path!("/dir"),
4842 json!({
4843 "a.ts": "",
4844 }),
4845 )
4846 .await;
4847
4848 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4849
4850 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4851 language_registry.add(typescript_lang());
4852 let mut fake_language_servers = language_registry.register_fake_lsp(
4853 "TypeScript",
4854 FakeLspAdapter {
4855 capabilities: lsp::ServerCapabilities {
4856 completion_provider: Some(lsp::CompletionOptions {
4857 trigger_characters: Some(vec![":".to_string()]),
4858 ..Default::default()
4859 }),
4860 ..Default::default()
4861 },
4862 ..Default::default()
4863 },
4864 );
4865
4866 let (buffer, _handle) = project
4867 .update(cx, |p, cx| {
4868 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4869 })
4870 .await
4871 .unwrap();
4872
4873 let fake_server = fake_language_servers.next().await.unwrap();
4874 cx.executor().run_until_parked();
4875
4876 let text = "let a = b.fqn";
4877 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4878 let completions = project.update(cx, |project, cx| {
4879 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4880 });
4881
4882 fake_server
4883 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4884 Ok(Some(lsp::CompletionResponse::Array(vec![
4885 lsp::CompletionItem {
4886 label: "fullyQualifiedName?".into(),
4887 insert_text: Some("fully\rQualified\r\nName".into()),
4888 ..Default::default()
4889 },
4890 ])))
4891 })
4892 .next()
4893 .await;
4894 let completions = completions
4895 .await
4896 .unwrap()
4897 .into_iter()
4898 .flat_map(|response| response.completions)
4899 .collect::<Vec<_>>();
4900 assert_eq!(completions.len(), 1);
4901 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4902}
4903
// End-to-end exercise of a code action that carries a command instead of
// edits: resolving the action yields a command, executing the command makes
// the server send `workspace/applyEdit` back to the client, and the applied
// edits surface as a single undoable project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // The fake server advertises resolvable code actions plus one
    // executable command named "_the/command".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying the command payload.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole applied action is undoable as a single step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5047
5048#[gpui::test]
5049async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5050 init_test(cx);
5051 let fs = FakeFs::new(cx.background_executor.clone());
5052 let expected_contents = "content";
5053 fs.as_fake()
5054 .insert_tree(
5055 "/root",
5056 json!({
5057 "test.txt": expected_contents
5058 }),
5059 )
5060 .await;
5061
5062 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5063
5064 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5065 let worktree = project.worktrees(cx).next().unwrap();
5066 let entry_id = worktree
5067 .read(cx)
5068 .entry_for_path(rel_path("test.txt"))
5069 .unwrap()
5070 .id;
5071 (worktree, entry_id)
5072 });
5073 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5074 let _result = project
5075 .update(cx, |project, cx| {
5076 project.rename_entry(
5077 entry_id,
5078 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5079 cx,
5080 )
5081 })
5082 .await
5083 .unwrap();
5084 worktree.read_with(cx, |worktree, _| {
5085 assert!(
5086 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5087 "Old file should have been removed"
5088 );
5089 assert!(
5090 worktree
5091 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5092 .is_some(),
5093 "Whole directory hierarchy and the new file should have been created"
5094 );
5095 });
5096 assert_eq!(
5097 worktree
5098 .update(cx, |worktree, cx| {
5099 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5100 })
5101 .await
5102 .unwrap()
5103 .text,
5104 expected_contents,
5105 "Moved file's contents should be preserved"
5106 );
5107
5108 let entry_id = worktree.read_with(cx, |worktree, _| {
5109 worktree
5110 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5111 .unwrap()
5112 .id
5113 });
5114
5115 let _result = project
5116 .update(cx, |project, cx| {
5117 project.rename_entry(
5118 entry_id,
5119 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5120 cx,
5121 )
5122 })
5123 .await
5124 .unwrap();
5125 worktree.read_with(cx, |worktree, _| {
5126 assert!(
5127 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5128 "First file should not reappear"
5129 );
5130 assert!(
5131 worktree
5132 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5133 .is_none(),
5134 "Old file should have been removed"
5135 );
5136 assert!(
5137 worktree
5138 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5139 .is_some(),
5140 "No error should have occurred after moving into existing directory"
5141 );
5142 });
5143 assert_eq!(
5144 worktree
5145 .update(cx, |worktree, cx| {
5146 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5147 })
5148 .await
5149 .unwrap()
5150 .text,
5151 expected_contents,
5152 "Moved file's contents should be preserved"
5153 );
5154}
5155
5156#[gpui::test(iterations = 10)]
5157async fn test_save_file(cx: &mut gpui::TestAppContext) {
5158 init_test(cx);
5159
5160 let fs = FakeFs::new(cx.executor());
5161 fs.insert_tree(
5162 path!("/dir"),
5163 json!({
5164 "file1": "the old contents",
5165 }),
5166 )
5167 .await;
5168
5169 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5170 let buffer = project
5171 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5172 .await
5173 .unwrap();
5174 buffer.update(cx, |buffer, cx| {
5175 assert_eq!(buffer.text(), "the old contents");
5176 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5177 });
5178
5179 project
5180 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5181 .await
5182 .unwrap();
5183
5184 let new_text = fs
5185 .load(Path::new(path!("/dir/file1")))
5186 .await
5187 .unwrap()
5188 .replace("\r\n", "\n");
5189 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5190}
5191
// Regression test: saving an untitled buffer to a path whose extension maps
// to a language must start that language's server and register the file
// with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Empty project directory; the Rust file is created via save-as below.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file path it cannot match any
    // language, so no server should be associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer should report an attached language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5271
5272#[gpui::test(iterations = 30)]
5273async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5274 init_test(cx);
5275
5276 let fs = FakeFs::new(cx.executor());
5277 fs.insert_tree(
5278 path!("/dir"),
5279 json!({
5280 "file1": "the original contents",
5281 }),
5282 )
5283 .await;
5284
5285 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5286 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5287 let buffer = project
5288 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5289 .await
5290 .unwrap();
5291
5292 // Change the buffer's file on disk, and then wait for the file change
5293 // to be detected by the worktree, so that the buffer starts reloading.
5294 fs.save(
5295 path!("/dir/file1").as_ref(),
5296 &"the first contents".into(),
5297 Default::default(),
5298 )
5299 .await
5300 .unwrap();
5301 worktree.next_event(cx).await;
5302
5303 // Change the buffer's file again. Depending on the random seed, the
5304 // previous file change may still be in progress.
5305 fs.save(
5306 path!("/dir/file1").as_ref(),
5307 &"the second contents".into(),
5308 Default::default(),
5309 )
5310 .await
5311 .unwrap();
5312 worktree.next_event(cx).await;
5313
5314 cx.executor().run_until_parked();
5315 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5316 buffer.read_with(cx, |buffer, _| {
5317 assert_eq!(buffer.text(), on_disk_text);
5318 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5319 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5320 });
5321}
5322
5323#[gpui::test(iterations = 30)]
5324async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5325 init_test(cx);
5326
5327 let fs = FakeFs::new(cx.executor());
5328 fs.insert_tree(
5329 path!("/dir"),
5330 json!({
5331 "file1": "the original contents",
5332 }),
5333 )
5334 .await;
5335
5336 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5337 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5338 let buffer = project
5339 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5340 .await
5341 .unwrap();
5342
5343 // Change the buffer's file on disk, and then wait for the file change
5344 // to be detected by the worktree, so that the buffer starts reloading.
5345 fs.save(
5346 path!("/dir/file1").as_ref(),
5347 &"the first contents".into(),
5348 Default::default(),
5349 )
5350 .await
5351 .unwrap();
5352 worktree.next_event(cx).await;
5353
5354 cx.executor()
5355 .spawn(cx.executor().simulate_random_delay())
5356 .await;
5357
5358 // Perform a noop edit, causing the buffer's version to increase.
5359 buffer.update(cx, |buffer, cx| {
5360 buffer.edit([(0..0, " ")], None, cx);
5361 buffer.undo(cx);
5362 });
5363
5364 cx.executor().run_until_parked();
5365 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5366 buffer.read_with(cx, |buffer, _| {
5367 let buffer_text = buffer.text();
5368 if buffer_text == on_disk_text {
5369 assert!(
5370 !buffer.is_dirty() && !buffer.has_conflict(),
5371 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5372 );
5373 }
5374 // If the file change occurred while the buffer was processing the first
5375 // change, the buffer will be in a conflicting state.
5376 else {
5377 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5378 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5379 }
5380 });
5381}
5382
5383#[gpui::test]
5384async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5385 init_test(cx);
5386
5387 let fs = FakeFs::new(cx.executor());
5388 fs.insert_tree(
5389 path!("/dir"),
5390 json!({
5391 "file1": "the old contents",
5392 }),
5393 )
5394 .await;
5395
5396 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5397 let buffer = project
5398 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5399 .await
5400 .unwrap();
5401 buffer.update(cx, |buffer, cx| {
5402 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5403 });
5404
5405 project
5406 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5407 .await
5408 .unwrap();
5409
5410 let new_text = fs
5411 .load(Path::new(path!("/dir/file1")))
5412 .await
5413 .unwrap()
5414 .replace("\r\n", "\n");
5415 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5416}
5417
5418#[gpui::test]
5419async fn test_save_as(cx: &mut gpui::TestAppContext) {
5420 init_test(cx);
5421
5422 let fs = FakeFs::new(cx.executor());
5423 fs.insert_tree("/dir", json!({})).await;
5424
5425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5426
5427 let languages = project.update(cx, |project, _| project.languages().clone());
5428 languages.add(rust_lang());
5429
5430 let buffer = project.update(cx, |project, cx| {
5431 project.create_local_buffer("", None, false, cx)
5432 });
5433 buffer.update(cx, |buffer, cx| {
5434 buffer.edit([(0..0, "abc")], None, cx);
5435 assert!(buffer.is_dirty());
5436 assert!(!buffer.has_conflict());
5437 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5438 });
5439 project
5440 .update(cx, |project, cx| {
5441 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5442 let path = ProjectPath {
5443 worktree_id,
5444 path: rel_path("file1.rs").into(),
5445 };
5446 project.save_buffer_as(buffer.clone(), path, cx)
5447 })
5448 .await
5449 .unwrap();
5450 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5451
5452 cx.executor().run_until_parked();
5453 buffer.update(cx, |buffer, cx| {
5454 assert_eq!(
5455 buffer.file().unwrap().full_path(cx),
5456 Path::new("dir/file1.rs")
5457 );
5458 assert!(!buffer.is_dirty());
5459 assert!(!buffer.has_conflict());
5460 assert_eq!(buffer.language().unwrap().name(), "Rust");
5461 });
5462
5463 let opened_buffer = project
5464 .update(cx, |project, cx| {
5465 project.open_local_buffer("/dir/file1.rs", cx)
5466 })
5467 .await
5468 .unwrap();
5469 assert_eq!(opened_buffer, buffer);
5470}
5471
5472#[gpui::test]
5473async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5474 init_test(cx);
5475
5476 let fs = FakeFs::new(cx.executor());
5477 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5478
5479 fs.insert_tree(
5480 path!("/dir"),
5481 json!({
5482 "data_a.txt": "data about a"
5483 }),
5484 )
5485 .await;
5486
5487 let buffer = project
5488 .update(cx, |project, cx| {
5489 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5490 })
5491 .await
5492 .unwrap();
5493
5494 buffer.update(cx, |buffer, cx| {
5495 buffer.edit([(11..12, "b")], None, cx);
5496 });
5497
5498 // Save buffer's contents as a new file and confirm that the buffer's now
5499 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5500 // file associated with the buffer has now been updated to `data_b.txt`
5501 project
5502 .update(cx, |project, cx| {
5503 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5504 let new_path = ProjectPath {
5505 worktree_id,
5506 path: rel_path("data_b.txt").into(),
5507 };
5508
5509 project.save_buffer_as(buffer.clone(), new_path, cx)
5510 })
5511 .await
5512 .unwrap();
5513
5514 buffer.update(cx, |buffer, cx| {
5515 assert_eq!(
5516 buffer.file().unwrap().full_path(cx),
5517 Path::new("dir/data_b.txt")
5518 )
5519 });
5520
5521 // Open the original `data_a.txt` file, confirming that its contents are
5522 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5523 let original_buffer = project
5524 .update(cx, |project, cx| {
5525 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5526 })
5527 .await
5528 .unwrap();
5529
5530 original_buffer.update(cx, |buffer, cx| {
5531 assert_eq!(buffer.text(), "data about a");
5532 assert_eq!(
5533 buffer.file().unwrap().full_path(cx),
5534 Path::new("dir/data_a.txt")
5535 )
5536 });
5537}
5538
// Verifies three things after files are renamed/deleted on disk:
// 1. the local worktree rescan produces the expected set of paths,
// 2. entry ids and open buffers follow their files across renames (and a
//    buffer whose file was deleted reports `DiskState::Deleted`), and
// 3. a remote replica of the worktree converges to the same paths once it
//    applies the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Uses the real filesystem, so the test executor must be allowed to park
    // while waiting on actual I/O and FS events.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree streams, so they can be
    // replayed on the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames and moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers now point at the files' new paths. The buffer whose file
    // was deleted keeps its last known path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5706
5707#[cfg(target_os = "linux")]
5708#[gpui::test(retries = 5)]
5709async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5710 init_test(cx);
5711 cx.executor().allow_parking();
5712
5713 let dir = TempTree::new(json!({}));
5714 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5715 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5716
5717 tree.flush_fs_events(cx).await;
5718
5719 let repro_dir = dir.path().join("repro");
5720 std::fs::create_dir(&repro_dir).unwrap();
5721 tree.flush_fs_events(cx).await;
5722
5723 cx.update(|cx| {
5724 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5725 });
5726
5727 std::fs::remove_dir_all(&repro_dir).unwrap();
5728 tree.flush_fs_events(cx).await;
5729
5730 cx.update(|cx| {
5731 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5732 });
5733
5734 std::fs::create_dir(&repro_dir).unwrap();
5735 tree.flush_fs_events(cx).await;
5736
5737 cx.update(|cx| {
5738 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5739 });
5740
5741 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5742 tree.flush_fs_events(cx).await;
5743
5744 cx.update(|cx| {
5745 assert!(
5746 tree.read(cx)
5747 .entry_for_path(rel_path("repro/repro-marker"))
5748 .is_some()
5749 );
5750 });
5751}
5752
// Renaming a directory through the project API must preserve the entry ids of
// the directory and of the files inside it, and must not dirty buffers that
// were opened under the old path.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory `a` to `b` via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are unchanged, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5806
5807#[gpui::test]
5808async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5809 init_test(cx);
5810
5811 let fs = FakeFs::new(cx.executor());
5812 fs.insert_tree(
5813 "/dir",
5814 json!({
5815 "a.txt": "a-contents",
5816 "b.txt": "b-contents",
5817 }),
5818 )
5819 .await;
5820
5821 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5822
5823 // Spawn multiple tasks to open paths, repeating some paths.
5824 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5825 (
5826 p.open_local_buffer("/dir/a.txt", cx),
5827 p.open_local_buffer("/dir/b.txt", cx),
5828 p.open_local_buffer("/dir/a.txt", cx),
5829 )
5830 });
5831
5832 let buffer_a_1 = buffer_a_1.await.unwrap();
5833 let buffer_a_2 = buffer_a_2.await.unwrap();
5834 let buffer_b = buffer_b.await.unwrap();
5835 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5836 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5837
5838 // There is only one buffer per path.
5839 let buffer_a_id = buffer_a_1.entity_id();
5840 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5841
5842 // Open the same path again while it is still open.
5843 drop(buffer_a_1);
5844 let buffer_a_3 = project
5845 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5846 .await
5847 .unwrap();
5848
5849 // There's still only one buffer per path.
5850 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5851}
5852
5853#[gpui::test]
5854async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5855 init_test(cx);
5856
5857 let fs = FakeFs::new(cx.executor());
5858 fs.insert_tree(
5859 path!("/dir"),
5860 json!({
5861 "file1": "abc",
5862 "file2": "def",
5863 "file3": "ghi",
5864 }),
5865 )
5866 .await;
5867
5868 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5869
5870 let buffer1 = project
5871 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5872 .await
5873 .unwrap();
5874 let events = Arc::new(Mutex::new(Vec::new()));
5875
5876 // initially, the buffer isn't dirty.
5877 buffer1.update(cx, |buffer, cx| {
5878 cx.subscribe(&buffer1, {
5879 let events = events.clone();
5880 move |_, _, event, _| match event {
5881 BufferEvent::Operation { .. } => {}
5882 _ => events.lock().push(event.clone()),
5883 }
5884 })
5885 .detach();
5886
5887 assert!(!buffer.is_dirty());
5888 assert!(events.lock().is_empty());
5889
5890 buffer.edit([(1..2, "")], None, cx);
5891 });
5892
5893 // after the first edit, the buffer is dirty, and emits a dirtied event.
5894 buffer1.update(cx, |buffer, cx| {
5895 assert!(buffer.text() == "ac");
5896 assert!(buffer.is_dirty());
5897 assert_eq!(
5898 *events.lock(),
5899 &[
5900 language::BufferEvent::Edited { is_local: true },
5901 language::BufferEvent::DirtyChanged
5902 ]
5903 );
5904 events.lock().clear();
5905 buffer.did_save(
5906 buffer.version(),
5907 buffer.file().unwrap().disk_state().mtime(),
5908 cx,
5909 );
5910 });
5911
5912 // after saving, the buffer is not dirty, and emits a saved event.
5913 buffer1.update(cx, |buffer, cx| {
5914 assert!(!buffer.is_dirty());
5915 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5916 events.lock().clear();
5917
5918 buffer.edit([(1..1, "B")], None, cx);
5919 buffer.edit([(2..2, "D")], None, cx);
5920 });
5921
5922 // after editing again, the buffer is dirty, and emits another dirty event.
5923 buffer1.update(cx, |buffer, cx| {
5924 assert!(buffer.text() == "aBDc");
5925 assert!(buffer.is_dirty());
5926 assert_eq!(
5927 *events.lock(),
5928 &[
5929 language::BufferEvent::Edited { is_local: true },
5930 language::BufferEvent::DirtyChanged,
5931 language::BufferEvent::Edited { is_local: true },
5932 ],
5933 );
5934 events.lock().clear();
5935
5936 // After restoring the buffer to its previously-saved state,
5937 // the buffer is not considered dirty anymore.
5938 buffer.edit([(1..3, "")], None, cx);
5939 assert!(buffer.text() == "ac");
5940 assert!(!buffer.is_dirty());
5941 });
5942
5943 assert_eq!(
5944 *events.lock(),
5945 &[
5946 language::BufferEvent::Edited { is_local: true },
5947 language::BufferEvent::DirtyChanged
5948 ]
5949 );
5950
5951 // When a file is deleted, it is not considered dirty.
5952 let events = Arc::new(Mutex::new(Vec::new()));
5953 let buffer2 = project
5954 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5955 .await
5956 .unwrap();
5957 buffer2.update(cx, |_, cx| {
5958 cx.subscribe(&buffer2, {
5959 let events = events.clone();
5960 move |_, _, event, _| match event {
5961 BufferEvent::Operation { .. } => {}
5962 _ => events.lock().push(event.clone()),
5963 }
5964 })
5965 .detach();
5966 });
5967
5968 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5969 .await
5970 .unwrap();
5971 cx.executor().run_until_parked();
5972 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5973 assert_eq!(
5974 mem::take(&mut *events.lock()),
5975 &[language::BufferEvent::FileHandleChanged]
5976 );
5977
5978 // Buffer becomes dirty when edited.
5979 buffer2.update(cx, |buffer, cx| {
5980 buffer.edit([(2..3, "")], None, cx);
5981 assert_eq!(buffer.is_dirty(), true);
5982 });
5983 assert_eq!(
5984 mem::take(&mut *events.lock()),
5985 &[
5986 language::BufferEvent::Edited { is_local: true },
5987 language::BufferEvent::DirtyChanged
5988 ]
5989 );
5990
5991 // Buffer becomes clean again when all of its content is removed, because
5992 // the file was deleted.
5993 buffer2.update(cx, |buffer, cx| {
5994 buffer.edit([(0..2, "")], None, cx);
5995 assert_eq!(buffer.is_empty(), true);
5996 assert_eq!(buffer.is_dirty(), false);
5997 });
5998 assert_eq!(
5999 *events.lock(),
6000 &[
6001 language::BufferEvent::Edited { is_local: true },
6002 language::BufferEvent::DirtyChanged
6003 ]
6004 );
6005
6006 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6007 let events = Arc::new(Mutex::new(Vec::new()));
6008 let buffer3 = project
6009 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6010 .await
6011 .unwrap();
6012 buffer3.update(cx, |_, cx| {
6013 cx.subscribe(&buffer3, {
6014 let events = events.clone();
6015 move |_, _, event, _| match event {
6016 BufferEvent::Operation { .. } => {}
6017 _ => events.lock().push(event.clone()),
6018 }
6019 })
6020 .detach();
6021 });
6022
6023 buffer3.update(cx, |buffer, cx| {
6024 buffer.edit([(0..0, "x")], None, cx);
6025 });
6026 events.lock().clear();
6027 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6028 .await
6029 .unwrap();
6030 cx.executor().run_until_parked();
6031 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6032 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6033}
6034
// A dirty buffer whose file changes on disk is marked conflicted rather than
// reloaded; once an undo returns the buffer to a clean state, the newer
// on-disk contents are loaded into the buffer.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text and is flagged as conflicted.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
6103
// When a clean buffer's file changes on disk, the buffer reloads by applying
// a diff of the old and new contents, so existing anchors land at the
// corresponding new offsets. When a dirty buffer's file changes, the buffer
// keeps its text and is marked as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers identify the offsets where anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at each marked offset so we can verify where they end up
    // after the reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor has tracked through the diff to the new marked offset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6186
// Buffers normalize text to `\n` internally and record the file's original
// line-ending style; that style is tracked when the file changes on disk and
// reapplied when the buffer is saved.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // The CRLF file's text is normalized to `\n`, but its line-ending style
    // is remembered as Windows.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
6248
// Diagnostics published with `relatedInformation` are grouped: a primary
// diagnostic and the hint diagnostics that refer back to it share a
// `group_id`, with `is_primary` set only on the primary entry.
// `diagnostic_group` returns every entry belonging to one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two groups:
    // - "error 1" (warning) plus one related hint, and
    // - "error 2" (error) plus two related hints.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary warning; its related information points at the hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint whose related information points back at "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary error with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // The two hints for "error 2", each pointing back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries in buffer order; "error 2" and its hints form group 0,
    // "error 1" and its hint form group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: both hints plus the primary "error 2".
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the primary warning "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6508
// Renaming a worktree entry must send `workspace/willRenameFiles` to a server
// that registered matching file-operation filters, and must follow up with a
// `workspace/didRenameFiles` notification. The server's response to
// willRenameFiles (a WorkspaceEdit) is captured to verify the request was
// made and answered with the expected edit.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers for rename notifications on `*.rs` files and on
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of `one.rs` to `three.rs`.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The WorkspaceEdit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set by the willRenameFiles handler so we can verify it was invoked.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request carries the old and new URIs of the file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6645
6646#[gpui::test]
6647async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises prepare_rename and perform_rename against a fake language server.
6649 init_test(cx);
6650
6651 let fs = FakeFs::new(cx.executor());
6652 fs.insert_tree(
6653 path!("/dir"),
6654 json!({
6655 "one.rs": "const ONE: usize = 1;",
6656 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6657 }),
6658 )
6659 .await;
6660
6661 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6662
6663 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6664 language_registry.add(rust_lang());
6665 let mut fake_servers = language_registry.register_fake_lsp(
6666 "Rust",
6667 FakeLspAdapter {
6668 capabilities: lsp::ServerCapabilities {
6669 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
6670 prepare_provider: Some(true),
6671 work_done_progress_options: Default::default(),
6672 })),
6673 ..Default::default()
6674 },
6675 ..Default::default()
6676 },
6677 );
6678
6679 let (buffer, _handle) = project
6680 .update(cx, |project, cx| {
6681 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
6682 })
6683 .await
6684 .unwrap();
6685
6686 let fake_server = fake_servers.next().await.unwrap();
6687 cx.executor().run_until_parked();
6688
6689 let response = project.update(cx, |project, cx| {
6690 project.prepare_rename(buffer.clone(), 7, cx)
6691 });
6692 fake_server
6693 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6694 assert_eq!(
6695 params.text_document.uri.as_str(),
6696 uri!("file:///dir/one.rs")
6697 );
6698 assert_eq!(params.position, lsp::Position::new(0, 7));
6699 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6700 lsp::Position::new(0, 6),
6701 lsp::Position::new(0, 9),
6702 ))))
6703 })
6704 .next()
6705 .await
6706 .unwrap();
6707 let response = response.await.unwrap();
6708 let PrepareRenameResponse::Success(range) = response else {
6709 panic!("{:?}", response);
6710 };
6711 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
6712 assert_eq!(range, 6..9);
6713
6714 let response = project.update(cx, |project, cx| {
6715 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
6716 });
6717 fake_server
6718 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
6719 assert_eq!(
6720 params.text_document_position.text_document.uri.as_str(),
6721 uri!("file:///dir/one.rs")
6722 );
6723 assert_eq!(
6724 params.text_document_position.position,
6725 lsp::Position::new(0, 7)
6726 );
6727 assert_eq!(params.new_name, "THREE");
6728 Ok(Some(lsp::WorkspaceEdit {
6729 changes: Some(
6730 [
6731 (
6732 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
6733 vec![lsp::TextEdit::new(
6734 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
6735 "THREE".to_string(),
6736 )],
6737 ),
6738 (
6739 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
6740 vec![
6741 lsp::TextEdit::new(
6742 lsp::Range::new(
6743 lsp::Position::new(0, 24),
6744 lsp::Position::new(0, 27),
6745 ),
6746 "THREE".to_string(),
6747 ),
6748 lsp::TextEdit::new(
6749 lsp::Range::new(
6750 lsp::Position::new(0, 35),
6751 lsp::Position::new(0, 38),
6752 ),
6753 "THREE".to_string(),
6754 ),
6755 ],
6756 ),
6757 ]
6758 .into_iter()
6759 .collect(),
6760 ),
6761 ..Default::default()
6762 }))
6763 })
6764 .next()
6765 .await
6766 .unwrap();
6767 let mut transaction = response.await.unwrap().0;
6768 assert_eq!(transaction.len(), 2);
6769 assert_eq!(
6770 transaction
6771 .remove_entry(&buffer)
6772 .unwrap()
6773 .0
6774 .update(cx, |buffer, _| buffer.text()),
6775 "const THREE: usize = 1;"
6776 );
6777 assert_eq!(
6778 transaction
6779 .into_keys()
6780 .next()
6781 .unwrap()
6782 .update(cx, |buffer, _| buffer.text()),
6783 "const TWO: usize = one::THREE + one::THREE;"
6784 );
6785}
6786
6787#[gpui::test]
6788async fn test_search(cx: &mut gpui::TestAppContext) {
6789 init_test(cx);
6790
6791 let fs = FakeFs::new(cx.executor());
6792 fs.insert_tree(
6793 path!("/dir"),
6794 json!({
6795 "one.rs": "const ONE: usize = 1;",
6796 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6797 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6798 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6799 }),
6800 )
6801 .await;
6802 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6803 assert_eq!(
6804 search(
6805 &project,
6806 SearchQuery::text(
6807 "TWO",
6808 false,
6809 true,
6810 false,
6811 Default::default(),
6812 Default::default(),
6813 false,
6814 None
6815 )
6816 .unwrap(),
6817 cx
6818 )
6819 .await
6820 .unwrap(),
6821 HashMap::from_iter([
6822 (path!("dir/two.rs").to_string(), vec![6..9]),
6823 (path!("dir/three.rs").to_string(), vec![37..40])
6824 ])
6825 );
6826
6827 let buffer_4 = project
6828 .update(cx, |project, cx| {
6829 project.open_local_buffer(path!("/dir/four.rs"), cx)
6830 })
6831 .await
6832 .unwrap();
6833 buffer_4.update(cx, |buffer, cx| {
6834 let text = "two::TWO";
6835 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6836 });
6837
6838 assert_eq!(
6839 search(
6840 &project,
6841 SearchQuery::text(
6842 "TWO",
6843 false,
6844 true,
6845 false,
6846 Default::default(),
6847 Default::default(),
6848 false,
6849 None,
6850 )
6851 .unwrap(),
6852 cx
6853 )
6854 .await
6855 .unwrap(),
6856 HashMap::from_iter([
6857 (path!("dir/two.rs").to_string(), vec![6..9]),
6858 (path!("dir/three.rs").to_string(), vec![37..40]),
6859 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6860 ])
6861 );
6862}
6863
6864#[gpui::test]
6865async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6866 init_test(cx);
6867
6868 let search_query = "file";
6869
6870 let fs = FakeFs::new(cx.executor());
6871 fs.insert_tree(
6872 path!("/dir"),
6873 json!({
6874 "one.rs": r#"// Rust file one"#,
6875 "one.ts": r#"// TypeScript file one"#,
6876 "two.rs": r#"// Rust file two"#,
6877 "two.ts": r#"// TypeScript file two"#,
6878 }),
6879 )
6880 .await;
6881 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6882
6883 assert!(
6884 search(
6885 &project,
6886 SearchQuery::text(
6887 search_query,
6888 false,
6889 true,
6890 false,
6891 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6892 Default::default(),
6893 false,
6894 None
6895 )
6896 .unwrap(),
6897 cx
6898 )
6899 .await
6900 .unwrap()
6901 .is_empty(),
6902 "If no inclusions match, no files should be returned"
6903 );
6904
6905 assert_eq!(
6906 search(
6907 &project,
6908 SearchQuery::text(
6909 search_query,
6910 false,
6911 true,
6912 false,
6913 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6914 Default::default(),
6915 false,
6916 None
6917 )
6918 .unwrap(),
6919 cx
6920 )
6921 .await
6922 .unwrap(),
6923 HashMap::from_iter([
6924 (path!("dir/one.rs").to_string(), vec![8..12]),
6925 (path!("dir/two.rs").to_string(), vec![8..12]),
6926 ]),
6927 "Rust only search should give only Rust files"
6928 );
6929
6930 assert_eq!(
6931 search(
6932 &project,
6933 SearchQuery::text(
6934 search_query,
6935 false,
6936 true,
6937 false,
6938 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6939 .unwrap(),
6940 Default::default(),
6941 false,
6942 None,
6943 )
6944 .unwrap(),
6945 cx
6946 )
6947 .await
6948 .unwrap(),
6949 HashMap::from_iter([
6950 (path!("dir/one.ts").to_string(), vec![14..18]),
6951 (path!("dir/two.ts").to_string(), vec![14..18]),
6952 ]),
6953 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6954 );
6955
6956 assert_eq!(
6957 search(
6958 &project,
6959 SearchQuery::text(
6960 search_query,
6961 false,
6962 true,
6963 false,
6964 PathMatcher::new(
6965 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6966 PathStyle::local()
6967 )
6968 .unwrap(),
6969 Default::default(),
6970 false,
6971 None,
6972 )
6973 .unwrap(),
6974 cx
6975 )
6976 .await
6977 .unwrap(),
6978 HashMap::from_iter([
6979 (path!("dir/two.ts").to_string(), vec![14..18]),
6980 (path!("dir/one.rs").to_string(), vec![8..12]),
6981 (path!("dir/one.ts").to_string(), vec![14..18]),
6982 (path!("dir/two.rs").to_string(), vec![8..12]),
6983 ]),
6984 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6985 );
6986}
6987
6988#[gpui::test]
6989async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6990 init_test(cx);
6991
6992 let search_query = "file";
6993
6994 let fs = FakeFs::new(cx.executor());
6995 fs.insert_tree(
6996 path!("/dir"),
6997 json!({
6998 "one.rs": r#"// Rust file one"#,
6999 "one.ts": r#"// TypeScript file one"#,
7000 "two.rs": r#"// Rust file two"#,
7001 "two.ts": r#"// TypeScript file two"#,
7002 }),
7003 )
7004 .await;
7005 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7006
7007 assert_eq!(
7008 search(
7009 &project,
7010 SearchQuery::text(
7011 search_query,
7012 false,
7013 true,
7014 false,
7015 Default::default(),
7016 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7017 false,
7018 None,
7019 )
7020 .unwrap(),
7021 cx
7022 )
7023 .await
7024 .unwrap(),
7025 HashMap::from_iter([
7026 (path!("dir/one.rs").to_string(), vec![8..12]),
7027 (path!("dir/one.ts").to_string(), vec![14..18]),
7028 (path!("dir/two.rs").to_string(), vec![8..12]),
7029 (path!("dir/two.ts").to_string(), vec![14..18]),
7030 ]),
7031 "If no exclusions match, all files should be returned"
7032 );
7033
7034 assert_eq!(
7035 search(
7036 &project,
7037 SearchQuery::text(
7038 search_query,
7039 false,
7040 true,
7041 false,
7042 Default::default(),
7043 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7044 false,
7045 None,
7046 )
7047 .unwrap(),
7048 cx
7049 )
7050 .await
7051 .unwrap(),
7052 HashMap::from_iter([
7053 (path!("dir/one.ts").to_string(), vec![14..18]),
7054 (path!("dir/two.ts").to_string(), vec![14..18]),
7055 ]),
7056 "Rust exclusion search should give only TypeScript files"
7057 );
7058
7059 assert_eq!(
7060 search(
7061 &project,
7062 SearchQuery::text(
7063 search_query,
7064 false,
7065 true,
7066 false,
7067 Default::default(),
7068 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7069 .unwrap(),
7070 false,
7071 None,
7072 )
7073 .unwrap(),
7074 cx
7075 )
7076 .await
7077 .unwrap(),
7078 HashMap::from_iter([
7079 (path!("dir/one.rs").to_string(), vec![8..12]),
7080 (path!("dir/two.rs").to_string(), vec![8..12]),
7081 ]),
7082 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7083 );
7084
7085 assert!(
7086 search(
7087 &project,
7088 SearchQuery::text(
7089 search_query,
7090 false,
7091 true,
7092 false,
7093 Default::default(),
7094 PathMatcher::new(
7095 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7096 PathStyle::local(),
7097 )
7098 .unwrap(),
7099 false,
7100 None,
7101 )
7102 .unwrap(),
7103 cx
7104 )
7105 .await
7106 .unwrap()
7107 .is_empty(),
7108 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7109 );
7110}
7111
7112#[gpui::test]
7113async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7114 init_test(cx);
7115
7116 let search_query = "file";
7117
7118 let fs = FakeFs::new(cx.executor());
7119 fs.insert_tree(
7120 path!("/dir"),
7121 json!({
7122 "one.rs": r#"// Rust file one"#,
7123 "one.ts": r#"// TypeScript file one"#,
7124 "two.rs": r#"// Rust file two"#,
7125 "two.ts": r#"// TypeScript file two"#,
7126 }),
7127 )
7128 .await;
7129
7130 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7131 let path_style = PathStyle::local();
7132 let _buffer = project.update(cx, |project, cx| {
7133 project.create_local_buffer("file", None, false, cx)
7134 });
7135
7136 assert_eq!(
7137 search(
7138 &project,
7139 SearchQuery::text(
7140 search_query,
7141 false,
7142 true,
7143 false,
7144 Default::default(),
7145 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7146 false,
7147 None,
7148 )
7149 .unwrap(),
7150 cx
7151 )
7152 .await
7153 .unwrap(),
7154 HashMap::from_iter([
7155 (path!("dir/one.rs").to_string(), vec![8..12]),
7156 (path!("dir/one.ts").to_string(), vec![14..18]),
7157 (path!("dir/two.rs").to_string(), vec![8..12]),
7158 (path!("dir/two.ts").to_string(), vec![14..18]),
7159 ]),
7160 "If no exclusions match, all files should be returned"
7161 );
7162
7163 assert_eq!(
7164 search(
7165 &project,
7166 SearchQuery::text(
7167 search_query,
7168 false,
7169 true,
7170 false,
7171 Default::default(),
7172 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7173 false,
7174 None,
7175 )
7176 .unwrap(),
7177 cx
7178 )
7179 .await
7180 .unwrap(),
7181 HashMap::from_iter([
7182 (path!("dir/one.ts").to_string(), vec![14..18]),
7183 (path!("dir/two.ts").to_string(), vec![14..18]),
7184 ]),
7185 "Rust exclusion search should give only TypeScript files"
7186 );
7187
7188 assert_eq!(
7189 search(
7190 &project,
7191 SearchQuery::text(
7192 search_query,
7193 false,
7194 true,
7195 false,
7196 Default::default(),
7197 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7198 false,
7199 None,
7200 )
7201 .unwrap(),
7202 cx
7203 )
7204 .await
7205 .unwrap(),
7206 HashMap::from_iter([
7207 (path!("dir/one.rs").to_string(), vec![8..12]),
7208 (path!("dir/two.rs").to_string(), vec![8..12]),
7209 ]),
7210 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7211 );
7212
7213 assert!(
7214 search(
7215 &project,
7216 SearchQuery::text(
7217 search_query,
7218 false,
7219 true,
7220 false,
7221 Default::default(),
7222 PathMatcher::new(
7223 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7224 PathStyle::local(),
7225 )
7226 .unwrap(),
7227 false,
7228 None,
7229 )
7230 .unwrap(),
7231 cx
7232 )
7233 .await
7234 .unwrap()
7235 .is_empty(),
7236 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7237 );
7238}
7239
7240#[gpui::test]
7241async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7242 init_test(cx);
7243
7244 let search_query = "file";
7245
7246 let fs = FakeFs::new(cx.executor());
7247 fs.insert_tree(
7248 path!("/dir"),
7249 json!({
7250 "one.rs": r#"// Rust file one"#,
7251 "one.ts": r#"// TypeScript file one"#,
7252 "two.rs": r#"// Rust file two"#,
7253 "two.ts": r#"// TypeScript file two"#,
7254 }),
7255 )
7256 .await;
7257 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7258 assert!(
7259 search(
7260 &project,
7261 SearchQuery::text(
7262 search_query,
7263 false,
7264 true,
7265 false,
7266 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7267 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7268 false,
7269 None,
7270 )
7271 .unwrap(),
7272 cx
7273 )
7274 .await
7275 .unwrap()
7276 .is_empty(),
7277 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7278 );
7279
7280 assert!(
7281 search(
7282 &project,
7283 SearchQuery::text(
7284 search_query,
7285 false,
7286 true,
7287 false,
7288 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7289 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7290 false,
7291 None,
7292 )
7293 .unwrap(),
7294 cx
7295 )
7296 .await
7297 .unwrap()
7298 .is_empty(),
7299 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7300 );
7301
7302 assert!(
7303 search(
7304 &project,
7305 SearchQuery::text(
7306 search_query,
7307 false,
7308 true,
7309 false,
7310 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7311 .unwrap(),
7312 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7313 .unwrap(),
7314 false,
7315 None,
7316 )
7317 .unwrap(),
7318 cx
7319 )
7320 .await
7321 .unwrap()
7322 .is_empty(),
7323 "Non-matching inclusions and exclusions should not change that."
7324 );
7325
7326 assert_eq!(
7327 search(
7328 &project,
7329 SearchQuery::text(
7330 search_query,
7331 false,
7332 true,
7333 false,
7334 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7335 .unwrap(),
7336 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7337 .unwrap(),
7338 false,
7339 None,
7340 )
7341 .unwrap(),
7342 cx
7343 )
7344 .await
7345 .unwrap(),
7346 HashMap::from_iter([
7347 (path!("dir/one.ts").to_string(), vec![14..18]),
7348 (path!("dir/two.ts").to_string(), vec![14..18]),
7349 ]),
7350 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7351 );
7352}
7353
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With two worktrees open, inclusion globs prefixed with a worktree name
    // should restrict results to that worktree, while un-prefixed globs
    // should match across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the `true` passed after the exclusion matcher appears to
    // make the globs match against worktree-prefixed paths (it is `false` in
    // the un-prefixed query below) — confirm against SearchQuery::text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An extension-only glob is not tied to a worktree and should match the
    // .ts file in both of them.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7452
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Searching should skip gitignored directories by default, include them
    // when the include-ignored flag is set, and still honor path
    // inclusions/exclusions inside ignored directories.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (4th flag false): ignored dirs are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh Project is built for each query below —
    // presumably to avoid reusing worktree scan state from the previous
    // search; confirm whether this is required.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the include-ignored flag set: everything matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions still apply within the ignored directories.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7577
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching for multi-byte (Cyrillic) text: the reported match ranges are
    // byte offsets ("привет" is 6 chars / 12 bytes, hence ranges like 3..15).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive query stays a plain Text query and matches only the
    // lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive unicode text is promoted to a Regex query (asserted
    // below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Trailing punctuation in the query must match literally: only two.rs
    // contains "ПРИВЕТ." (13 bytes: 12 for the word + 1 for the dot).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7660
7661#[gpui::test]
7662async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7663 init_test(cx);
7664
7665 let fs = FakeFs::new(cx.executor());
7666 fs.insert_tree(
7667 "/one/two",
7668 json!({
7669 "three": {
7670 "a.txt": "",
7671 "four": {}
7672 },
7673 "c.rs": ""
7674 }),
7675 )
7676 .await;
7677
7678 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7679 project
7680 .update(cx, |project, cx| {
7681 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7682 project.create_entry((id, rel_path("b..")), true, cx)
7683 })
7684 .await
7685 .unwrap()
7686 .into_included()
7687 .unwrap();
7688
7689 assert_eq!(
7690 fs.paths(true),
7691 vec![
7692 PathBuf::from(path!("/")),
7693 PathBuf::from(path!("/one")),
7694 PathBuf::from(path!("/one/two")),
7695 PathBuf::from(path!("/one/two/c.rs")),
7696 PathBuf::from(path!("/one/two/three")),
7697 PathBuf::from(path!("/one/two/three/a.txt")),
7698 PathBuf::from(path!("/one/two/three/b..")),
7699 PathBuf::from(path!("/one/two/three/four")),
7700 ]
7701 );
7702}
7703
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Runs four fake language servers against one tsx buffer: three advertise
    // hover capability (one of which returns None), one does not. Hover
    // requests must only be sent to capable servers, and empty responses must
    // be dropped from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // First three servers declare hover support; the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name:
    // - TailwindServer/TypeScriptServer answer with a scalar hover string,
    // - ESLintServer answers with None (must be filtered out of the result),
    // - NoHoverCapabilitiesServer's handler panics if ever called.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Trigger the hover, then drive every registered handler to completion
    // before inspecting the merged responses.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7858
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover made entirely of empty/whitespace-only parts should yield no
    // hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three "contents" parts that are all blank:
    // empty string, spaces, and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Kick off the hover, wait for the server handler to run, then verify the
    // merged result contains nothing.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7932
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When code actions are requested with an explicit kinds filter, only
    // actions of the requested kind should come back, even though the server
    // returns more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds: organize-imports and
    // fix-all.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kinds filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8011
8012#[gpui::test]
8013async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8014 cx: &mut gpui::TestAppContext,
8015) {
8016 init_test(cx);
8017
8018 let fs = FakeFs::new(cx.executor());
8019 fs.insert_tree(
8020 path!("/dir"),
8021 json!({
8022 "a.ts": "a",
8023 }),
8024 )
8025 .await;
8026
8027 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8028
8029 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8030 language_registry.add(typescript_lang());
8031 let mut fake_language_servers = language_registry.register_fake_lsp(
8032 "TypeScript",
8033 FakeLspAdapter {
8034 capabilities: lsp::ServerCapabilities {
8035 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8036 lsp::CodeActionOptions {
8037 code_action_kinds: Some(vec![
8038 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8039 "source.doc".into(),
8040 ]),
8041 ..lsp::CodeActionOptions::default()
8042 },
8043 )),
8044 ..lsp::ServerCapabilities::default()
8045 },
8046 ..FakeLspAdapter::default()
8047 },
8048 );
8049
8050 let (buffer, _handle) = project
8051 .update(cx, |p, cx| {
8052 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8053 })
8054 .await
8055 .unwrap();
8056 cx.executor().run_until_parked();
8057
8058 let fake_server = fake_language_servers
8059 .next()
8060 .await
8061 .expect("failed to get the language server");
8062
8063 let mut request_handled = fake_server.set_request_handler::<
8064 lsp::request::CodeActionRequest,
8065 _,
8066 _,
8067 >(move |params, _| async move {
8068 assert_eq!(
8069 params.context.only, None,
8070 "Code action requests without explicit kind filters should not send `context.only`"
8071 );
8072 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8073 lsp::CodeAction {
8074 title: "Add test".to_string(),
8075 kind: Some("source.addTest".into()),
8076 ..lsp::CodeAction::default()
8077 },
8078 )]))
8079 });
8080
8081 let code_actions_task = project.update(cx, |project, cx| {
8082 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8083 });
8084
8085 let () = request_handled
8086 .next()
8087 .await
8088 .expect("The code action request should have been triggered");
8089
8090 let code_actions = code_actions_task.await.unwrap().unwrap();
8091 assert_eq!(code_actions.len(), 1);
8092 assert_eq!(
8093 code_actions[0].lsp_action.action_kind(),
8094 Some("source.addTest".into())
8095 );
8096}
8097
8098#[gpui::test]
8099async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8100 init_test(cx);
8101
8102 let fs = FakeFs::new(cx.executor());
8103 fs.insert_tree(
8104 path!("/dir"),
8105 json!({
8106 "a.tsx": "a",
8107 }),
8108 )
8109 .await;
8110
8111 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8112
8113 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8114 language_registry.add(tsx_lang());
8115 let language_server_names = [
8116 "TypeScriptServer",
8117 "TailwindServer",
8118 "ESLintServer",
8119 "NoActionsCapabilitiesServer",
8120 ];
8121
8122 let mut language_server_rxs = [
8123 language_registry.register_fake_lsp(
8124 "tsx",
8125 FakeLspAdapter {
8126 name: language_server_names[0],
8127 capabilities: lsp::ServerCapabilities {
8128 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8129 ..lsp::ServerCapabilities::default()
8130 },
8131 ..FakeLspAdapter::default()
8132 },
8133 ),
8134 language_registry.register_fake_lsp(
8135 "tsx",
8136 FakeLspAdapter {
8137 name: language_server_names[1],
8138 capabilities: lsp::ServerCapabilities {
8139 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8140 ..lsp::ServerCapabilities::default()
8141 },
8142 ..FakeLspAdapter::default()
8143 },
8144 ),
8145 language_registry.register_fake_lsp(
8146 "tsx",
8147 FakeLspAdapter {
8148 name: language_server_names[2],
8149 capabilities: lsp::ServerCapabilities {
8150 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8151 ..lsp::ServerCapabilities::default()
8152 },
8153 ..FakeLspAdapter::default()
8154 },
8155 ),
8156 language_registry.register_fake_lsp(
8157 "tsx",
8158 FakeLspAdapter {
8159 name: language_server_names[3],
8160 capabilities: lsp::ServerCapabilities {
8161 code_action_provider: None,
8162 ..lsp::ServerCapabilities::default()
8163 },
8164 ..FakeLspAdapter::default()
8165 },
8166 ),
8167 ];
8168
8169 let (buffer, _handle) = project
8170 .update(cx, |p, cx| {
8171 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8172 })
8173 .await
8174 .unwrap();
8175 cx.executor().run_until_parked();
8176
8177 let mut servers_with_actions_requests = HashMap::default();
8178 for i in 0..language_server_names.len() {
8179 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8180 panic!(
8181 "Failed to get language server #{i} with name {}",
8182 &language_server_names[i]
8183 )
8184 });
8185 let new_server_name = new_server.server.name();
8186
8187 assert!(
8188 !servers_with_actions_requests.contains_key(&new_server_name),
8189 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8190 );
8191 match new_server_name.0.as_ref() {
8192 "TailwindServer" | "TypeScriptServer" => {
8193 servers_with_actions_requests.insert(
8194 new_server_name.clone(),
8195 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8196 move |_, _| {
8197 let name = new_server_name.clone();
8198 async move {
8199 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8200 lsp::CodeAction {
8201 title: format!("{name} code action"),
8202 ..lsp::CodeAction::default()
8203 },
8204 )]))
8205 }
8206 },
8207 ),
8208 );
8209 }
8210 "ESLintServer" => {
8211 servers_with_actions_requests.insert(
8212 new_server_name,
8213 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8214 |_, _| async move { Ok(None) },
8215 ),
8216 );
8217 }
8218 "NoActionsCapabilitiesServer" => {
8219 let _never_handled = new_server
8220 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8221 panic!(
8222 "Should not call for code actions server with no corresponding capabilities"
8223 )
8224 });
8225 }
8226 unexpected => panic!("Unexpected server name: {unexpected}"),
8227 }
8228 }
8229
8230 let code_actions_task = project.update(cx, |project, cx| {
8231 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8232 });
8233
8234 // cx.run_until_parked();
8235 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8236 |mut code_actions_request| async move {
8237 code_actions_request
8238 .next()
8239 .await
8240 .expect("All code actions requests should have been triggered")
8241 },
8242 ))
8243 .await;
8244 assert_eq!(
8245 vec!["TailwindServer code action", "TypeScriptServer code action"],
8246 code_actions_task
8247 .await
8248 .unwrap()
8249 .unwrap()
8250 .into_iter()
8251 .map(|code_action| code_action.lsp_action.title().to_owned())
8252 .sorted()
8253 .collect::<Vec<_>>(),
8254 "Should receive code actions responses from all related servers with hover capabilities"
8255 );
8256}
8257
8258#[gpui::test]
8259async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8260 init_test(cx);
8261
8262 let fs = FakeFs::new(cx.executor());
8263 fs.insert_tree(
8264 "/dir",
8265 json!({
8266 "a.rs": "let a = 1;",
8267 "b.rs": "let b = 2;",
8268 "c.rs": "let c = 2;",
8269 }),
8270 )
8271 .await;
8272
8273 let project = Project::test(
8274 fs,
8275 [
8276 "/dir/a.rs".as_ref(),
8277 "/dir/b.rs".as_ref(),
8278 "/dir/c.rs".as_ref(),
8279 ],
8280 cx,
8281 )
8282 .await;
8283
8284 // check the initial state and get the worktrees
8285 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8286 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8287 assert_eq!(worktrees.len(), 3);
8288
8289 let worktree_a = worktrees[0].read(cx);
8290 let worktree_b = worktrees[1].read(cx);
8291 let worktree_c = worktrees[2].read(cx);
8292
8293 // check they start in the right order
8294 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8295 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8296 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8297
8298 (
8299 worktrees[0].clone(),
8300 worktrees[1].clone(),
8301 worktrees[2].clone(),
8302 )
8303 });
8304
8305 // move first worktree to after the second
8306 // [a, b, c] -> [b, a, c]
8307 project
8308 .update(cx, |project, cx| {
8309 let first = worktree_a.read(cx);
8310 let second = worktree_b.read(cx);
8311 project.move_worktree(first.id(), second.id(), cx)
8312 })
8313 .expect("moving first after second");
8314
8315 // check the state after moving
8316 project.update(cx, |project, cx| {
8317 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8318 assert_eq!(worktrees.len(), 3);
8319
8320 let first = worktrees[0].read(cx);
8321 let second = worktrees[1].read(cx);
8322 let third = worktrees[2].read(cx);
8323
8324 // check they are now in the right order
8325 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8326 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8327 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8328 });
8329
8330 // move the second worktree to before the first
8331 // [b, a, c] -> [a, b, c]
8332 project
8333 .update(cx, |project, cx| {
8334 let second = worktree_a.read(cx);
8335 let first = worktree_b.read(cx);
8336 project.move_worktree(first.id(), second.id(), cx)
8337 })
8338 .expect("moving second before first");
8339
8340 // check the state after moving
8341 project.update(cx, |project, cx| {
8342 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8343 assert_eq!(worktrees.len(), 3);
8344
8345 let first = worktrees[0].read(cx);
8346 let second = worktrees[1].read(cx);
8347 let third = worktrees[2].read(cx);
8348
8349 // check they are now in the right order
8350 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8351 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8352 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8353 });
8354
8355 // move the second worktree to after the third
8356 // [a, b, c] -> [a, c, b]
8357 project
8358 .update(cx, |project, cx| {
8359 let second = worktree_b.read(cx);
8360 let third = worktree_c.read(cx);
8361 project.move_worktree(second.id(), third.id(), cx)
8362 })
8363 .expect("moving second after third");
8364
8365 // check the state after moving
8366 project.update(cx, |project, cx| {
8367 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8368 assert_eq!(worktrees.len(), 3);
8369
8370 let first = worktrees[0].read(cx);
8371 let second = worktrees[1].read(cx);
8372 let third = worktrees[2].read(cx);
8373
8374 // check they are now in the right order
8375 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8376 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8377 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8378 });
8379
8380 // move the third worktree to before the second
8381 // [a, c, b] -> [a, b, c]
8382 project
8383 .update(cx, |project, cx| {
8384 let third = worktree_c.read(cx);
8385 let second = worktree_b.read(cx);
8386 project.move_worktree(third.id(), second.id(), cx)
8387 })
8388 .expect("moving third before second");
8389
8390 // check the state after moving
8391 project.update(cx, |project, cx| {
8392 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8393 assert_eq!(worktrees.len(), 3);
8394
8395 let first = worktrees[0].read(cx);
8396 let second = worktrees[1].read(cx);
8397 let third = worktrees[2].read(cx);
8398
8399 // check they are now in the right order
8400 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8401 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8402 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8403 });
8404
8405 // move the first worktree to after the third
8406 // [a, b, c] -> [b, c, a]
8407 project
8408 .update(cx, |project, cx| {
8409 let first = worktree_a.read(cx);
8410 let third = worktree_c.read(cx);
8411 project.move_worktree(first.id(), third.id(), cx)
8412 })
8413 .expect("moving first after third");
8414
8415 // check the state after moving
8416 project.update(cx, |project, cx| {
8417 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8418 assert_eq!(worktrees.len(), 3);
8419
8420 let first = worktrees[0].read(cx);
8421 let second = worktrees[1].read(cx);
8422 let third = worktrees[2].read(cx);
8423
8424 // check they are now in the right order
8425 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8426 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8427 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8428 });
8429
8430 // move the third worktree to before the first
8431 // [b, c, a] -> [a, b, c]
8432 project
8433 .update(cx, |project, cx| {
8434 let third = worktree_a.read(cx);
8435 let first = worktree_b.read(cx);
8436 project.move_worktree(third.id(), first.id(), cx)
8437 })
8438 .expect("moving third before first");
8439
8440 // check the state after moving
8441 project.update(cx, |project, cx| {
8442 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8443 assert_eq!(worktrees.len(), 3);
8444
8445 let first = worktrees[0].read(cx);
8446 let second = worktrees[1].read(cx);
8447 let third = worktrees[2].read(cx);
8448
8449 // check they are now in the right order
8450 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8451 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8452 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8453 });
8454}
8455
// Verifies that an unstaged diff (working copy vs. git index) is computed
// correctly on open, and that it recomputes when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index ("staged") version of the file — this is the diff's base text.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The on-disk (working copy) version: adds a comment line and changes the
    // printed string relative to the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff against the index: one added line, one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: stage the comment line and drop the print statement
    // from the staged version entirely.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index event is processed, only the print line remains as an
    // addition relative to the new base text.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8550
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// HEAD and the index, including staged/unstaged secondary hunk status and
// deleted files.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file — the uncommitted diff's base text.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the string change is staged, the comment line is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language for highlighting.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment still has a secondary (unstaged) hunk; the string
    // change is already staged, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; still present in the index, so the
    // deletion is unstaged (has a secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index, the deletion hunk is fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8742
// Exercises the hunk-staging state machine: optimistic pending states
// (`SecondaryHunkRemovalPending`), the transition to staged once the index
// write lands, rollback when the index write fails, and two concurrent
// staging operations. Also checks the `BufferDiffEvent` sequence emitted for
// each transition.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with this content...
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // ...while the working copy deletes "zero" and upcases "two" and "four",
    // producing three hunks: one deletion and two modifications.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert the emitted sequence below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        // Select only the "two" -> "TWO" hunk on row 1.
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the hunk is in the
        // pending state rather than fully staged.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        // Select the "four" -> "FOUR" hunk on row 3.
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistically pending, even though the index write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9092
// Regression test: requesting the uncommitted diff *before* the unstaged diff
// for the same buffer must still yield correct hunks once both have loaded.
#[gpui::test(iterations = 10)]
async fn test_uncommitted_diff_opened_before_unstaged_diff(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index both contain "two"; the working copy changes it to
    // "TWO", so the one modified hunk is entirely unstaged.
    let committed_contents = "one\ntwo\nthree\n";
    let file_contents = "one\nTWO\nthree\n";

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents,
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.into())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();

    // Kick off the uncommitted-diff request first, then the unstaged-diff
    // request — this ordering is the scenario under test — and await both.
    let uncommitted_diff_task = project.update(cx, |project, cx| {
        project.open_uncommitted_diff(buffer.clone(), cx)
    });
    let unstaged_diff_task = project.update(cx, |project, cx| {
        project.open_unstaged_diff(buffer.clone(), cx)
    });
    let (uncommitted_diff, _unstaged_diff) =
        futures::future::join(uncommitted_diff_task, unstaged_diff_task).await;
    let uncommitted_diff = uncommitted_diff.unwrap();
    let _unstaged_diff = _unstaged_diff.unwrap();

    cx.run_until_parked();

    // One modified hunk, still present in the secondary (index) diff, i.e.
    // reported as unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });
}
9154
// Verifies hunk staging state when filesystem events for index writes arrive
// late: hunks being staged show `SecondaryHunkRemovalPending` until the
// corresponding FS event is processed, further staging operations can be
// interleaved with delayed events, and everything settles as staged
// (`NoSecondaryHunk`) in the end.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index hold all six lines; the working copy deletes "zero" and
    // modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, its secondary status stays
    // pending rather than clearing immediately.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9348
// Randomized test: repeatedly stage/unstage random hunks with random delays
// in between, tracking the optimistically-expected secondary status of each
// hunk locally, then verify that once all IO settles, the diff's reported
// statuses match the expectation.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index agree; every fifth buffer line is modified, yielding
    // six separate hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        // Pick a random hunk and toggle its staged state, recording the
        // pending status we expect the diff to report optimistically.
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let other tasks interleave between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once parked, every pending status should have settled to its terminal
    // form; update the local expectation accordingly.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9468
9469#[gpui::test]
9470async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9471 init_test(cx);
9472
9473 let committed_contents = r#"
9474 fn main() {
9475 println!("hello from HEAD");
9476 }
9477 "#
9478 .unindent();
9479 let file_contents = r#"
9480 fn main() {
9481 println!("hello from the working copy");
9482 }
9483 "#
9484 .unindent();
9485
9486 let fs = FakeFs::new(cx.background_executor.clone());
9487 fs.insert_tree(
9488 "/dir",
9489 json!({
9490 ".git": {},
9491 "src": {
9492 "main.rs": file_contents,
9493 }
9494 }),
9495 )
9496 .await;
9497
9498 fs.set_head_for_repo(
9499 Path::new("/dir/.git"),
9500 &[("src/main.rs", committed_contents.clone())],
9501 "deadbeef",
9502 );
9503 fs.set_index_for_repo(
9504 Path::new("/dir/.git"),
9505 &[("src/main.rs", committed_contents.clone())],
9506 );
9507
9508 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9509
9510 let buffer = project
9511 .update(cx, |project, cx| {
9512 project.open_local_buffer("/dir/src/main.rs", cx)
9513 })
9514 .await
9515 .unwrap();
9516 let uncommitted_diff = project
9517 .update(cx, |project, cx| {
9518 project.open_uncommitted_diff(buffer.clone(), cx)
9519 })
9520 .await
9521 .unwrap();
9522
9523 cx.run_until_parked();
9524 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9525 let snapshot = buffer.read(cx).snapshot();
9526 assert_hunks(
9527 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9528 &snapshot,
9529 &uncommitted_diff.base_text_string(cx).unwrap(),
9530 &[(
9531 1..2,
9532 " println!(\"hello from HEAD\");\n",
9533 " println!(\"hello from the working copy\");\n",
9534 DiffHunkStatus {
9535 kind: DiffHunkStatusKind::Modified,
9536 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9537 },
9538 )],
9539 );
9540 });
9541}
9542
9543// TODO: Should we test this on Windows also?
9544#[gpui::test]
9545#[cfg(not(windows))]
9546async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
9547 use std::os::unix::fs::PermissionsExt;
9548 init_test(cx);
9549 cx.executor().allow_parking();
9550 let committed_contents = "bar\n";
9551 let file_contents = "baz\n";
9552 let root = TempTree::new(json!({
9553 "project": {
9554 "foo": committed_contents
9555 },
9556 }));
9557
9558 let work_dir = root.path().join("project");
9559 let file_path = work_dir.join("foo");
9560 let repo = git_init(work_dir.as_path());
9561 let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
9562 perms.set_mode(0o755);
9563 std::fs::set_permissions(&file_path, perms).unwrap();
9564 git_add("foo", &repo);
9565 git_commit("Initial commit", &repo);
9566 std::fs::write(&file_path, file_contents).unwrap();
9567
9568 let project = Project::test(
9569 Arc::new(RealFs::new(None, cx.executor())),
9570 [root.path()],
9571 cx,
9572 )
9573 .await;
9574
9575 let buffer = project
9576 .update(cx, |project, cx| {
9577 project.open_local_buffer(file_path.as_path(), cx)
9578 })
9579 .await
9580 .unwrap();
9581
9582 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
9583
9584 let uncommitted_diff = project
9585 .update(cx, |project, cx| {
9586 project.open_uncommitted_diff(buffer.clone(), cx)
9587 })
9588 .await
9589 .unwrap();
9590
9591 uncommitted_diff.update(cx, |diff, cx| {
9592 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
9593 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
9594 });
9595
9596 cx.run_until_parked();
9597
9598 let output = smol::process::Command::new("git")
9599 .current_dir(&work_dir)
9600 .args(["diff", "--staged"])
9601 .output()
9602 .await
9603 .unwrap();
9604
9605 let staged_diff = String::from_utf8_lossy(&output.stdout);
9606
9607 assert!(
9608 !staged_diff.contains("new mode 100644"),
9609 "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
9610 staged_diff
9611 );
9612
9613 let output = smol::process::Command::new("git")
9614 .current_dir(&work_dir)
9615 .args(["ls-files", "-s"])
9616 .output()
9617 .await
9618 .unwrap();
9619 let index_contents = String::from_utf8_lossy(&output.stdout);
9620
9621 assert!(
9622 index_contents.contains("100755"),
9623 "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
9624 index_contents
9625 );
9626}
9627
9628#[gpui::test]
9629async fn test_repository_and_path_for_project_path(
9630 background_executor: BackgroundExecutor,
9631 cx: &mut gpui::TestAppContext,
9632) {
9633 init_test(cx);
9634 let fs = FakeFs::new(background_executor);
9635 fs.insert_tree(
9636 path!("/root"),
9637 json!({
9638 "c.txt": "",
9639 "dir1": {
9640 ".git": {},
9641 "deps": {
9642 "dep1": {
9643 ".git": {},
9644 "src": {
9645 "a.txt": ""
9646 }
9647 }
9648 },
9649 "src": {
9650 "b.txt": ""
9651 }
9652 },
9653 }),
9654 )
9655 .await;
9656
9657 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9658 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9659 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9660 project
9661 .update(cx, |project, cx| project.git_scans_complete(cx))
9662 .await;
9663 cx.run_until_parked();
9664
9665 project.read_with(cx, |project, cx| {
9666 let git_store = project.git_store().read(cx);
9667 let pairs = [
9668 ("c.txt", None),
9669 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9670 (
9671 "dir1/deps/dep1/src/a.txt",
9672 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9673 ),
9674 ];
9675 let expected = pairs
9676 .iter()
9677 .map(|(path, result)| {
9678 (
9679 path,
9680 result.map(|(repo, repo_path)| {
9681 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9682 }),
9683 )
9684 })
9685 .collect::<Vec<_>>();
9686 let actual = pairs
9687 .iter()
9688 .map(|(path, _)| {
9689 let project_path = (tree_id, rel_path(path)).into();
9690 let result = maybe!({
9691 let (repo, repo_path) =
9692 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9693 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9694 });
9695 (path, result)
9696 })
9697 .collect::<Vec<_>>();
9698 pretty_assertions::assert_eq!(expected, actual);
9699 });
9700
9701 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9702 .await
9703 .unwrap();
9704 cx.run_until_parked();
9705
9706 project.read_with(cx, |project, cx| {
9707 let git_store = project.git_store().read(cx);
9708 assert_eq!(
9709 git_store.repository_and_path_for_project_path(
9710 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9711 cx
9712 ),
9713 None
9714 );
9715 });
9716}
9717
9718#[gpui::test]
9719async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9720 init_test(cx);
9721 let fs = FakeFs::new(cx.background_executor.clone());
9722 let home = paths::home_dir();
9723 fs.insert_tree(
9724 home,
9725 json!({
9726 ".git": {},
9727 "project": {
9728 "a.txt": "A"
9729 },
9730 }),
9731 )
9732 .await;
9733
9734 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9735 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9736 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9737
9738 project
9739 .update(cx, |project, cx| project.git_scans_complete(cx))
9740 .await;
9741 tree.flush_fs_events(cx).await;
9742
9743 project.read_with(cx, |project, cx| {
9744 let containing = project
9745 .git_store()
9746 .read(cx)
9747 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9748 assert!(containing.is_none());
9749 });
9750
9751 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9752 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9753 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9754 project
9755 .update(cx, |project, cx| project.git_scans_complete(cx))
9756 .await;
9757 tree.flush_fs_events(cx).await;
9758
9759 project.read_with(cx, |project, cx| {
9760 let containing = project
9761 .git_store()
9762 .read(cx)
9763 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9764 assert_eq!(
9765 containing
9766 .unwrap()
9767 .0
9768 .read(cx)
9769 .work_directory_abs_path
9770 .as_ref(),
9771 home,
9772 );
9773 });
9774}
9775
// Exercises `cached_status` against a real git repository across several
// phases: initial scan, modifying a clean file, committing everything, and
// finally deleting tracked and untracked files — asserting status codes and
// per-file diff stats at each step.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a tracked, previously-clean file; it should now appear modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit all outstanding changes, then delete one tracked file (a.txt)
    // and one untracked file (b.txt).
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9931
9932#[gpui::test]
9933#[ignore]
9934async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
9935 init_test(cx);
9936 cx.executor().allow_parking();
9937
9938 let root = TempTree::new(json!({
9939 "project": {
9940 "sub": {},
9941 "a.txt": "",
9942 },
9943 }));
9944
9945 let work_dir = root.path().join("project");
9946 let repo = git_init(work_dir.as_path());
9947 // a.txt exists in HEAD and the working copy but is deleted in the index.
9948 git_add("a.txt", &repo);
9949 git_commit("Initial commit", &repo);
9950 git_remove_index("a.txt".as_ref(), &repo);
9951 // `sub` is a nested git repository.
9952 let _sub = git_init(&work_dir.join("sub"));
9953
9954 let project = Project::test(
9955 Arc::new(RealFs::new(None, cx.executor())),
9956 [root.path()],
9957 cx,
9958 )
9959 .await;
9960
9961 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9962 tree.flush_fs_events(cx).await;
9963 project
9964 .update(cx, |project, cx| project.git_scans_complete(cx))
9965 .await;
9966 cx.executor().run_until_parked();
9967
9968 let repository = project.read_with(cx, |project, cx| {
9969 project
9970 .repositories(cx)
9971 .values()
9972 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
9973 .unwrap()
9974 .clone()
9975 });
9976
9977 repository.read_with(cx, |repository, _cx| {
9978 let entries = repository.cached_status().collect::<Vec<_>>();
9979
9980 // `sub` doesn't appear in our computed statuses.
9981 // a.txt appears with a combined `DA` status.
9982 assert_eq!(
9983 entries,
9984 [StatusEntry {
9985 repo_path: repo_path("a.txt"),
9986 status: TrackedStatus {
9987 index_status: StatusCode::Deleted,
9988 worktree_status: StatusCode::Added
9989 }
9990 .into(),
9991 diff_stat: None,
9992 }]
9993 )
9994 });
9995}
9996
9997#[track_caller]
9998/// We merge lhs into rhs.
9999fn merge_pending_ops_snapshots(
10000 source: Vec<pending_op::PendingOps>,
10001 mut target: Vec<pending_op::PendingOps>,
10002) -> Vec<pending_op::PendingOps> {
10003 for s_ops in source {
10004 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
10005 if ops.repo_path == s_ops.repo_path {
10006 Some(idx)
10007 } else {
10008 None
10009 }
10010 }) {
10011 let t_ops = &mut target[idx];
10012 for s_op in s_ops.ops {
10013 if let Some(op_idx) = t_ops
10014 .ops
10015 .iter()
10016 .zip(0..)
10017 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10018 {
10019 let t_op = &mut t_ops.ops[op_idx];
10020 match (s_op.job_status, t_op.job_status) {
10021 (pending_op::JobStatus::Running, _) => {}
10022 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10023 (s_st, t_st) if s_st == t_st => {}
10024 _ => unreachable!(),
10025 }
10026 } else {
10027 t_ops.ops.push(s_op);
10028 }
10029 }
10030 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10031 } else {
10032 target.push(s_ops);
10033 }
10034 }
10035 target
10036}
10037
// Per-path pending-op bookkeeping while alternately staging and unstaging one
// untracked file five times. Every `PendingOpsChanged` event is merged into a
// single accumulated snapshot so the complete op history (ids 1..=5,
// alternating Staged/Unstaged, all eventually `Finished`) can be asserted.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store; merging
    // across events preserves finished statuses that later snapshots may drop.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Each stage/unstage below is expected to be assigned the next
    // sequential op id.
    let mut id = 1u16;

    // Stages (or unstages) `path`, asserting the newest op is `Running` while
    // the task is in flight and `Finished` once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated history contains all five ops, in order, all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final (odd) operation was a stage, so the file ends up Added in the
    // index and unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10202
// Issues the same stage request twice — the first detached, the second
// awaited — and asserts (via the accumulated snapshots) that the first op is
// recorded as `Skipped` while the second one runs to `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store so the
    // full op history can be asserted after the fact.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, not awaited.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second, identical stage request: awaited (with a timeout as a safety
    // net against the test hanging).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 (superseded by the duplicate request) is Skipped; op 2 Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file was staged exactly once: Added in the index, clean on disk.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10312
// Verifies that a stage-one, stage-all, unstage-all sequence records the
// expected pending-op history per path, and that both untracked files end up
// back in the `Untracked` cached status once all operations complete.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    // Both files start out untracked in the fake repository.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every `PendingOpsChanged` snapshot into one merged tree so
    // the full op history per path can be asserted at the end. The
    // subscription is installed before awaiting the initial git scan so no
    // events are missed.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a single entry, then stage everything, then unstage everything —
    // each operation is awaited so they execute strictly in order.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt sees two finished ops: the explicit stage (stage_all found nothing
    // left to do for it), then the unstage.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt was only touched by stage_all/unstage_all, yet records the same
    // two-op history.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the round trip both files are back to plain untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10443
// Opening a worktree rooted in a subfolder of a repository should still
// discover the enclosing repository (work directory above the worktree root)
// and report statuses for paths relative to the repository root.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt is marked untracked; c.txt has no status entry.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Note: the worktree root is a subfolder two levels below the repo root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The discovered work directory is the repository root, not the
        // worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses in the fake repo and wait for the rescan to pick
    // that change up.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10523
// TODO: this test is flaky (especially on Windows, but at least occasionally on all platforms).
// `#[cfg(any())]` is always false, so this test is currently compiled out
// (see the flakiness TODO). It exercises conflict detection during a real
// `git cherry-pick` and the clearing of conflicts after the pick is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    // Seed a real git repository with one committed file.
    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit conflicting edits of a.txt on two branches, then cherry-pick the
    // other branch's commit onto main to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really entered the cherry-pick conflict state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once resolved, the conflict list should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10606
// Rewriting .gitignore at runtime should flip which files are treated as
// ignored, and a subsequent index update should surface the newly
// non-ignored file as staged (Added).
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain .gitignore and a.xml; b.txt is ignored by
    // the initial "*.txt" rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has swapped: a.xml is now ignored, and b.txt shows up as
    // newly added to the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10674
// NOTE:
// This test always fails on Windows because, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Renaming a repository's work directory on disk should update the tracked
// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", leave "b" untracked, then modify "a" in the working copy.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: "a" modified in the worktree, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10756
// NOTE: This test always fails on Windows because, unlike on Unix, you can't
// rename a directory that some program already has open. This is a limitation
// of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end check of git status tracking against a real repository:
// startup state, working-copy edits, commits, resets/stash, .gitignore
// changes, and directory renames must all be reflected in the repository's
// cached statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new ignore
    // file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory with an untracked file inside.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked file's status should follow
    // its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10981
// Verifies that churn inside gitignored directories (emulating a flycheck
// run creating/removing temp files under target/) produces worktree entry
// events for tracked parents but no repository status updates.
// Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository update events and worktree entry-change events so the
    // test can assert exactly which notifications fire.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-harness noise, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Force the ignored file to be loaded so its directory chain is scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate build churn: create a deps dir, drop a temp file into it, then
    // remove the whole dir — flushing FS events after each step.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11140
11141// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11142// to different timings/ordering of events.
11143#[ignore]
11144#[gpui::test]
11145async fn test_odd_events_for_ignored_dirs(
11146 executor: BackgroundExecutor,
11147 cx: &mut gpui::TestAppContext,
11148) {
11149 init_test(cx);
11150 let fs = FakeFs::new(executor);
11151 fs.insert_tree(
11152 path!("/root"),
11153 json!({
11154 ".git": {},
11155 ".gitignore": "**/target/",
11156 "src": {
11157 "main.rs": "fn main() {}",
11158 },
11159 "target": {
11160 "debug": {
11161 "foo.txt": "foo",
11162 "deps": {}
11163 }
11164 }
11165 }),
11166 )
11167 .await;
11168 fs.set_head_and_index_for_repo(
11169 path!("/root/.git").as_ref(),
11170 &[
11171 (".gitignore", "**/target/".into()),
11172 ("src/main.rs", "fn main() {}".into()),
11173 ],
11174 );
11175
11176 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11177 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11178 let project_events = Arc::new(Mutex::new(Vec::new()));
11179 project.update(cx, |project, cx| {
11180 let repository_updates = repository_updates.clone();
11181 cx.subscribe(project.git_store(), move |_, _, e, _| {
11182 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11183 repository_updates.lock().push(e.clone());
11184 }
11185 })
11186 .detach();
11187 let project_events = project_events.clone();
11188 cx.subscribe_self(move |_, e, _| {
11189 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11190 project_events.lock().extend(
11191 updates
11192 .iter()
11193 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11194 .filter(|(path, _)| path != "fs-event-sentinel"),
11195 );
11196 }
11197 })
11198 .detach();
11199 });
11200
11201 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11202 tree.update(cx, |tree, cx| {
11203 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11204 })
11205 .await
11206 .unwrap();
11207 tree.flush_fs_events(cx).await;
11208 project
11209 .update(cx, |project, cx| project.git_scans_complete(cx))
11210 .await;
11211 cx.run_until_parked();
11212 tree.update(cx, |tree, _| {
11213 assert_eq!(
11214 tree.entries(true, 0)
11215 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11216 .collect::<Vec<_>>(),
11217 vec![
11218 (rel_path(""), false),
11219 (rel_path(".gitignore"), false),
11220 (rel_path("src"), false),
11221 (rel_path("src/main.rs"), false),
11222 (rel_path("target"), true),
11223 (rel_path("target/debug"), true),
11224 (rel_path("target/debug/deps"), true),
11225 (rel_path("target/debug/foo.txt"), true),
11226 ]
11227 );
11228 });
11229
11230 assert_eq!(
11231 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11232 vec![
11233 RepositoryEvent::HeadChanged,
11234 RepositoryEvent::StatusesChanged,
11235 RepositoryEvent::StatusesChanged,
11236 ],
11237 "Initial worktree scan should produce a repo update event"
11238 );
11239 assert_eq!(
11240 project_events.lock().drain(..).collect::<Vec<_>>(),
11241 vec![
11242 ("target".to_string(), PathChange::Loaded),
11243 ("target/debug".to_string(), PathChange::Loaded),
11244 ("target/debug/deps".to_string(), PathChange::Loaded),
11245 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11246 ],
11247 "All non-ignored entries and all opened firs should be getting a project event",
11248 );
11249
11250 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11251 // This may happen multiple times during a single flycheck, but once is enough for testing.
11252 fs.emit_fs_event("/root/target/debug/deps", None);
11253 tree.flush_fs_events(cx).await;
11254 project
11255 .update(cx, |project, cx| project.git_scans_complete(cx))
11256 .await;
11257 cx.executor().run_until_parked();
11258
11259 assert_eq!(
11260 repository_updates
11261 .lock()
11262 .iter()
11263 .cloned()
11264 .collect::<Vec<_>>(),
11265 Vec::new(),
11266 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11267 );
11268 assert_eq!(
11269 project_events.lock().as_slice(),
11270 Vec::new(),
11271 "No further project events should happen, as only ignored dirs received FS events",
11272 );
11273}
11274
11275#[gpui::test]
11276async fn test_repos_in_invisible_worktrees(
11277 executor: BackgroundExecutor,
11278 cx: &mut gpui::TestAppContext,
11279) {
11280 init_test(cx);
11281 let fs = FakeFs::new(executor);
11282 fs.insert_tree(
11283 path!("/root"),
11284 json!({
11285 "dir1": {
11286 ".git": {},
11287 "dep1": {
11288 ".git": {},
11289 "src": {
11290 "a.txt": "",
11291 },
11292 },
11293 "b.txt": "",
11294 },
11295 }),
11296 )
11297 .await;
11298
11299 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
11300 let _visible_worktree =
11301 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11302 project
11303 .update(cx, |project, cx| project.git_scans_complete(cx))
11304 .await;
11305
11306 let repos = project.read_with(cx, |project, cx| {
11307 project
11308 .repositories(cx)
11309 .values()
11310 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11311 .collect::<Vec<_>>()
11312 });
11313 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11314
11315 let (_invisible_worktree, _) = project
11316 .update(cx, |project, cx| {
11317 project.worktree_store().update(cx, |worktree_store, cx| {
11318 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
11319 })
11320 })
11321 .await
11322 .expect("failed to create worktree");
11323 project
11324 .update(cx, |project, cx| project.git_scans_complete(cx))
11325 .await;
11326
11327 let repos = project.read_with(cx, |project, cx| {
11328 project
11329 .repositories(cx)
11330 .values()
11331 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11332 .collect::<Vec<_>>()
11333 });
11334 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11335}
11336
/// Ignore state and index status must stay correct when the worktree is
/// rescanned after new files appear: files ignored by an ancestor
/// `.gitignore` (above the repository root) or by the repo's own
/// `.gitignore` must have no index status, while newly indexed files
/// report `Added`.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so entries like `.git` are visible in the
    // worktree and can be asserted on at the end of the test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            // This ancestor .gitignore lives *outside* the repository at /root/tree.
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the scanner to descend into the ignored directory so its entries
    // exist in the worktree snapshot for the assertions below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked and unmodified: no git status expected.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored only by the *ancestor* gitignore, which sits outside the
        // repository root, so the entry is not flagged as ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by the repository's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files (one of which is also staged via the fake index) and
    // verify the rescan picks up the changes.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file shows up as added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself must be reported as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11477
/// Linked git worktrees (a `.git` *file* pointing into `.git/worktrees/...`)
/// and submodules (a `.git` file pointing into `.git/modules/...`) must each
/// be discovered as their own repository, and git events inside them must
/// refresh their status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // `commondir` points back to the main `.git` directory.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main checkout, linked worktree, submodule)
    // should have been discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // A linked worktree's "original" repository is the main checkout.
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // Disk content "B" differs from the "b" written to HEAD/index above,
    // so the file must be reported as modified in the worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        // Unlike a linked worktree, a submodule is its own original repository.
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11649
/// Two project worktrees rooted inside the same git repository must be
/// deduplicated into a single `Repository` entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories that both live inside /root/project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository should be reported, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11696
/// After `save_buffer_as` moves a buffer to a different path, its unstaged
/// and uncommitted diffs must be recomputed against the *new* path's index
/// and HEAD contents (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each file at each git layer, so the assertions
    // below can tell exactly which base text a diff was computed against.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Make the buffer differ from every git layer so diffs are non-empty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // An uncommitted diff opened after the rename must compare against the
    // new path's HEAD content as well.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11810
11811async fn search(
11812 project: &Entity<Project>,
11813 query: SearchQuery,
11814 cx: &mut gpui::TestAppContext,
11815) -> Result<HashMap<String, Vec<Range<usize>>>> {
11816 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11817 let mut results = HashMap::default();
11818 while let Ok(search_result) = search_rx.rx.recv().await {
11819 match search_result {
11820 SearchResult::Buffer { buffer, ranges } => {
11821 results.entry(buffer).or_insert(ranges);
11822 }
11823 SearchResult::LimitReached => {}
11824 }
11825 }
11826 Ok(results
11827 .into_iter()
11828 .map(|(buffer, ranges)| {
11829 buffer.update(cx, |buffer, cx| {
11830 let path = buffer
11831 .file()
11832 .unwrap()
11833 .full_path(cx)
11834 .to_string_lossy()
11835 .to_string();
11836 let ranges = ranges
11837 .into_iter()
11838 .map(|range| range.to_offset(buffer))
11839 .collect::<Vec<_>>();
11840 (path, ranges)
11841 })
11842 })
11843 .collect())
11844}
11845
/// Reloading a buffer with an explicit encoding must be undoable and
/// redoable, restoring both the text and the recorded encoding without
/// marking the buffer dirty.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Initial state: detected as UTF-8, clean buffer.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the UTF-8 encoding and the original text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE reinterpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11909
11910#[gpui::test]
11911async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
11912 init_test(cx);
11913
11914 let fs = FakeFs::new(cx.executor());
11915 fs.insert_tree(
11916 path!("/root"),
11917 json!({
11918 "a": {
11919 ".git": {},
11920 ".zed": {
11921 "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
11922 },
11923 "src": { "main.rs": "" }
11924 },
11925 "b": {
11926 ".git": {},
11927 ".zed": {
11928 "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
11929 },
11930 "src": { "lib.rs": "" }
11931 },
11932 }),
11933 )
11934 .await;
11935
11936 let repos_created = Rc::new(RefCell::new(Vec::new()));
11937 let _observe = {
11938 let repos_created = repos_created.clone();
11939 cx.update(|cx| {
11940 cx.observe_new::<Repository>(move |repo, _, cx| {
11941 repos_created.borrow_mut().push(cx.entity().downgrade());
11942 let _ = repo;
11943 })
11944 })
11945 };
11946
11947 let project = Project::test(
11948 fs.clone(),
11949 [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
11950 cx,
11951 )
11952 .await;
11953
11954 let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
11955 scan_complete.await;
11956
11957 project.read_with(cx, |project, cx| {
11958 assert!(
11959 project.worktree_store().read(cx).initial_scan_completed(),
11960 "Expected initial scan to be completed after awaiting wait_for_initial_scan"
11961 );
11962 });
11963
11964 let created_repos_len = repos_created.borrow().len();
11965 assert_eq!(
11966 created_repos_len, 2,
11967 "Expected 2 repositories to be created during scan, got {}",
11968 created_repos_len
11969 );
11970
11971 project.read_with(cx, |project, cx| {
11972 let git_store = project.git_store().read(cx);
11973 assert_eq!(
11974 git_store.repositories().len(),
11975 2,
11976 "Expected 2 repositories in GitStore"
11977 );
11978 });
11979}
11980
11981pub fn init_test(cx: &mut gpui::TestAppContext) {
11982 zlog::init_test();
11983
11984 cx.update(|cx| {
11985 let settings_store = SettingsStore::test(cx);
11986 cx.set_global(settings_store);
11987 release_channel::init(semver::Version::new(0, 0, 0), cx);
11988 });
11989}
11990
11991fn json_lang() -> Arc<Language> {
11992 Arc::new(Language::new(
11993 LanguageConfig {
11994 name: "JSON".into(),
11995 matcher: LanguageMatcher {
11996 path_suffixes: vec!["json".to_string()],
11997 ..Default::default()
11998 },
11999 ..Default::default()
12000 },
12001 None,
12002 ))
12003}
12004
12005fn js_lang() -> Arc<Language> {
12006 Arc::new(Language::new(
12007 LanguageConfig {
12008 name: "JavaScript".into(),
12009 matcher: LanguageMatcher {
12010 path_suffixes: vec!["js".to_string()],
12011 ..Default::default()
12012 },
12013 ..Default::default()
12014 },
12015 None,
12016 ))
12017}
12018
/// Builds a fake "Python" language whose toolchain lister reports a `.venv`
/// directory found under any ancestor of the queried path (relative to the
/// worktree root), backed by the provided fake filesystem. Used to exercise
/// toolchain discovery without a real Python installation.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // "Moot" because it only lists candidate venvs; it never resolves one.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // A `.venv` directory next to any ancestor counts as a toolchain.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented for these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake environment.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12091
12092fn typescript_lang() -> Arc<Language> {
12093 Arc::new(Language::new(
12094 LanguageConfig {
12095 name: "TypeScript".into(),
12096 matcher: LanguageMatcher {
12097 path_suffixes: vec!["ts".to_string()],
12098 ..Default::default()
12099 },
12100 ..Default::default()
12101 },
12102 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12103 ))
12104}
12105
12106fn tsx_lang() -> Arc<Language> {
12107 Arc::new(Language::new(
12108 LanguageConfig {
12109 name: "tsx".into(),
12110 matcher: LanguageMatcher {
12111 path_suffixes: vec!["tsx".to_string()],
12112 ..Default::default()
12113 },
12114 ..Default::default()
12115 },
12116 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12117 ))
12118}
12119
12120fn get_all_tasks(
12121 project: &Entity<Project>,
12122 task_contexts: Arc<TaskContexts>,
12123 cx: &mut App,
12124) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12125 let new_tasks = project.update(cx, |project, cx| {
12126 project.task_store().update(cx, |task_store, cx| {
12127 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12128 this.used_and_current_resolved_tasks(task_contexts, cx)
12129 })
12130 })
12131 });
12132
12133 cx.background_spawn(async move {
12134 let (mut old, new) = new_tasks.await;
12135 old.extend(new);
12136 old
12137 })
12138}
12139
12140#[track_caller]
12141fn assert_entry_git_state(
12142 tree: &Worktree,
12143 repository: &Repository,
12144 path: &str,
12145 index_status: Option<StatusCode>,
12146 is_ignored: bool,
12147) {
12148 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12149 let entry = tree
12150 .entry_for_path(&rel_path(path))
12151 .unwrap_or_else(|| panic!("entry {path} not found"));
12152 let status = repository
12153 .status_for_path(&repo_path(path))
12154 .map(|entry| entry.status);
12155 let expected = index_status.map(|index_status| {
12156 TrackedStatus {
12157 index_status,
12158 worktree_status: StatusCode::Unmodified,
12159 }
12160 .into()
12161 });
12162 assert_eq!(
12163 status, expected,
12164 "expected {path} to have git status: {expected:?}"
12165 );
12166 assert_eq!(
12167 entry.is_ignored, is_ignored,
12168 "expected {path} to have is_ignored: {is_ignored}"
12169 );
12170}
12171
12172#[track_caller]
12173fn git_init(path: &Path) -> git2::Repository {
12174 let mut init_opts = RepositoryInitOptions::new();
12175 init_opts.initial_head("main");
12176 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12177}
12178
12179#[track_caller]
12180fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12181 let path = path.as_ref();
12182 let mut index = repo.index().expect("Failed to get index");
12183 index.add_path(path).expect("Failed to add file");
12184 index.write().expect("Failed to write index");
12185}
12186
12187#[track_caller]
12188fn git_remove_index(path: &Path, repo: &git2::Repository) {
12189 let mut index = repo.index().expect("Failed to get index");
12190 index.remove_path(path).expect("Failed to add file");
12191 index.write().expect("Failed to write index");
12192}
12193
12194#[track_caller]
12195fn git_commit(msg: &'static str, repo: &git2::Repository) {
12196 use git2::Signature;
12197
12198 let signature = Signature::now("test", "test@zed.dev").unwrap();
12199 let oid = repo.index().unwrap().write_tree().unwrap();
12200 let tree = repo.find_tree(oid).unwrap();
12201 if let Ok(head) = repo.head() {
12202 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12203
12204 let parent_commit = parent_obj.as_commit().unwrap();
12205
12206 repo.commit(
12207 Some("HEAD"),
12208 &signature,
12209 &signature,
12210 msg,
12211 &tree,
12212 &[parent_commit],
12213 )
12214 .expect("Failed to commit with parent");
12215 } else {
12216 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12217 .expect("Failed to commit");
12218 }
12219}
12220
// Cherry-picks `commit` onto the current HEAD.
// NOTE: `#[cfg(any())]` never matches, so this helper is currently compiled
// out — presumably kept around for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12226
12227#[track_caller]
12228fn git_stash(repo: &mut git2::Repository) {
12229 use git2::Signature;
12230
12231 let signature = Signature::now("test", "test@zed.dev").unwrap();
12232 repo.stash_save(&signature, "N/A", None)
12233 .expect("Failed to stash");
12234}
12235
12236#[track_caller]
12237fn git_reset(offset: usize, repo: &git2::Repository) {
12238 let head = repo.head().expect("Couldn't get repo head");
12239 let object = head.peel(git2::ObjectType::Commit).unwrap();
12240 let commit = object.as_commit().unwrap();
12241 let new_head = commit
12242 .parents()
12243 .inspect(|parnet| {
12244 parnet.message();
12245 })
12246 .nth(offset)
12247 .expect("Not enough history");
12248 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12249 .expect("Could not reset");
12250}
12251
/// Creates branch `name` pointing at the current HEAD commit.
/// NOTE: `#[cfg(any())]` never matches, so this helper is currently compiled
/// out — presumably kept around for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted expect message (previously said "Failed to commit").
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12262
// Points HEAD at the ref `name` and checks it out into the working tree.
// NOTE: `#[cfg(any())]` never matches, so this helper is currently compiled
// out — presumably kept around for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12269
// Collects the repository's full status as a map from path to `git2::Status`
// bitflags.
// NOTE: `#[cfg(any())]` never matches, so this helper is currently compiled
// out — presumably kept around for ad-hoc use in future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
12279
/// `find_project_path` must resolve absolute paths to the owning worktree
/// plus a worktree-relative path — including paths to files that don't exist
/// yet inside a worktree — and return `None` for paths outside every
/// worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees, one per project directory.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file in the first worktree.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even when the file doesn't exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12363
/// Removing project worktrees must drop repositories that are no longer
/// covered by any worktree, and keep `active_repository` pointing at a
/// repository from a remaining worktree (or `None` once all are gone).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: /root/a, /root/b, and /root/b/script, which is nested
    // inside the /root/b repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Two repositories: /root/a and /root/b (the script worktree shares b's).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested script worktree must not drop the /root/b repo,
    // since the /root/b worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that hosts the active repository must switch the
    // active repository to one from a remaining worktree.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree gone there is no active repository at all.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12476
/// Verifies that staging a file updates its diff hunk's secondary status
/// optimistically, before the git index write completes: the single modified
/// hunk transitions `HasSecondaryHunk` -> `SecondaryHunkRemovalPending`
/// (while staging is in flight) -> `NoSecondaryHunk` (staging done), and the
/// hunk disappears entirely once HEAD is updated to match the working copy.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD has "two"; the working copy has "TWO" — one modified hunk at row 1.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Both HEAD and the index start at the committed contents, so the
    // working-copy change is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    // Poll until the hunk reaches the optimistic "removal pending" state;
    // seeing NoSecondaryHunk here would mean staging completed without ever
    // showing the optimistic state.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The optimistic pending state must be observable before staging finishes.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12621
12622#[gpui::test]
12623async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12624 init_test(cx);
12625
12626 // Configure read_only_files setting
12627 cx.update(|cx| {
12628 cx.update_global::<SettingsStore, _>(|store, cx| {
12629 store.update_user_settings(cx, |settings| {
12630 settings.project.worktree.read_only_files = Some(vec![
12631 "**/generated/**".to_string(),
12632 "**/*.gen.rs".to_string(),
12633 ]);
12634 });
12635 });
12636 });
12637
12638 let fs = FakeFs::new(cx.background_executor.clone());
12639 fs.insert_tree(
12640 path!("/root"),
12641 json!({
12642 "src": {
12643 "main.rs": "fn main() {}",
12644 "types.gen.rs": "// Generated file",
12645 },
12646 "generated": {
12647 "schema.rs": "// Auto-generated schema",
12648 }
12649 }),
12650 )
12651 .await;
12652
12653 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12654
12655 // Open a regular file - should be read-write
12656 let regular_buffer = project
12657 .update(cx, |project, cx| {
12658 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12659 })
12660 .await
12661 .unwrap();
12662
12663 regular_buffer.read_with(cx, |buffer, _| {
12664 assert!(!buffer.read_only(), "Regular file should not be read-only");
12665 });
12666
12667 // Open a file matching *.gen.rs pattern - should be read-only
12668 let gen_buffer = project
12669 .update(cx, |project, cx| {
12670 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12671 })
12672 .await
12673 .unwrap();
12674
12675 gen_buffer.read_with(cx, |buffer, _| {
12676 assert!(
12677 buffer.read_only(),
12678 "File matching *.gen.rs pattern should be read-only"
12679 );
12680 });
12681
12682 // Open a file in generated directory - should be read-only
12683 let generated_buffer = project
12684 .update(cx, |project, cx| {
12685 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12686 })
12687 .await
12688 .unwrap();
12689
12690 generated_buffer.read_with(cx, |buffer, _| {
12691 assert!(
12692 buffer.read_only(),
12693 "File in generated directory should be read-only"
12694 );
12695 });
12696}
12697
12698#[gpui::test]
12699async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12700 init_test(cx);
12701
12702 // Explicitly set read_only_files to empty (default behavior)
12703 cx.update(|cx| {
12704 cx.update_global::<SettingsStore, _>(|store, cx| {
12705 store.update_user_settings(cx, |settings| {
12706 settings.project.worktree.read_only_files = Some(vec![]);
12707 });
12708 });
12709 });
12710
12711 let fs = FakeFs::new(cx.background_executor.clone());
12712 fs.insert_tree(
12713 path!("/root"),
12714 json!({
12715 "src": {
12716 "main.rs": "fn main() {}",
12717 },
12718 "generated": {
12719 "schema.rs": "// Auto-generated schema",
12720 }
12721 }),
12722 )
12723 .await;
12724
12725 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12726
12727 // All files should be read-write when read_only_files is empty
12728 let main_buffer = project
12729 .update(cx, |project, cx| {
12730 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12731 })
12732 .await
12733 .unwrap();
12734
12735 main_buffer.read_with(cx, |buffer, _| {
12736 assert!(
12737 !buffer.read_only(),
12738 "Files should not be read-only when read_only_files is empty"
12739 );
12740 });
12741
12742 let generated_buffer = project
12743 .update(cx, |project, cx| {
12744 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12745 })
12746 .await
12747 .unwrap();
12748
12749 generated_buffer.read_with(cx, |buffer, _| {
12750 assert!(
12751 !buffer.read_only(),
12752 "Generated files should not be read-only when read_only_files is empty"
12753 );
12754 });
12755}
12756
12757#[gpui::test]
12758async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12759 init_test(cx);
12760
12761 // Configure to make lock files read-only
12762 cx.update(|cx| {
12763 cx.update_global::<SettingsStore, _>(|store, cx| {
12764 store.update_user_settings(cx, |settings| {
12765 settings.project.worktree.read_only_files = Some(vec![
12766 "**/*.lock".to_string(),
12767 "**/package-lock.json".to_string(),
12768 ]);
12769 });
12770 });
12771 });
12772
12773 let fs = FakeFs::new(cx.background_executor.clone());
12774 fs.insert_tree(
12775 path!("/root"),
12776 json!({
12777 "Cargo.lock": "# Lock file",
12778 "Cargo.toml": "[package]",
12779 "package-lock.json": "{}",
12780 "package.json": "{}",
12781 }),
12782 )
12783 .await;
12784
12785 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12786
12787 // Cargo.lock should be read-only
12788 let cargo_lock = project
12789 .update(cx, |project, cx| {
12790 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12791 })
12792 .await
12793 .unwrap();
12794
12795 cargo_lock.read_with(cx, |buffer, _| {
12796 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12797 });
12798
12799 // Cargo.toml should be read-write
12800 let cargo_toml = project
12801 .update(cx, |project, cx| {
12802 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12803 })
12804 .await
12805 .unwrap();
12806
12807 cargo_toml.read_with(cx, |buffer, _| {
12808 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12809 });
12810
12811 // package-lock.json should be read-only
12812 let package_lock = project
12813 .update(cx, |project, cx| {
12814 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12815 })
12816 .await
12817 .unwrap();
12818
12819 package_lock.read_with(cx, |buffer, _| {
12820 assert!(buffer.read_only(), "package-lock.json should be read-only");
12821 });
12822
12823 // package.json should be read-write
12824 let package_json = project
12825 .update(cx, |project, cx| {
12826 project.open_local_buffer(path!("/root/package.json"), cx)
12827 })
12828 .await
12829 .unwrap();
12830
12831 package_json.read_with(cx, |buffer, _| {
12832 assert!(!buffer.read_only(), "package.json should not be read-only");
12833 });
12834}
12835
12836mod disable_ai_settings_tests {
12837 use gpui::TestAppContext;
12838 use project::*;
12839 use settings::{Settings, SettingsStore};
12840
12841 #[gpui::test]
12842 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12843 cx.update(|cx| {
12844 settings::init(cx);
12845
12846 // Test 1: Default is false (AI enabled)
12847 assert!(
12848 !DisableAiSettings::get_global(cx).disable_ai,
12849 "Default should allow AI"
12850 );
12851 });
12852
12853 let disable_true = serde_json::json!({
12854 "disable_ai": true
12855 })
12856 .to_string();
12857 let disable_false = serde_json::json!({
12858 "disable_ai": false
12859 })
12860 .to_string();
12861
12862 cx.update_global::<SettingsStore, _>(|store, cx| {
12863 store.set_user_settings(&disable_false, cx).unwrap();
12864 store.set_global_settings(&disable_true, cx).unwrap();
12865 });
12866 cx.update(|cx| {
12867 assert!(
12868 DisableAiSettings::get_global(cx).disable_ai,
12869 "Local false cannot override global true"
12870 );
12871 });
12872
12873 cx.update_global::<SettingsStore, _>(|store, cx| {
12874 store.set_global_settings(&disable_false, cx).unwrap();
12875 store.set_user_settings(&disable_true, cx).unwrap();
12876 });
12877
12878 cx.update(|cx| {
12879 assert!(
12880 DisableAiSettings::get_global(cx).disable_ai,
12881 "Local false cannot override global true"
12882 );
12883 });
12884 }
12885
12886 #[gpui::test]
12887 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12888 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12889 use worktree::WorktreeId;
12890
12891 cx.update(|cx| {
12892 settings::init(cx);
12893
12894 // Default should allow AI
12895 assert!(
12896 !DisableAiSettings::get_global(cx).disable_ai,
12897 "Default should allow AI"
12898 );
12899 });
12900
12901 let worktree_id = WorktreeId::from_usize(1);
12902 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12903 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12904 };
12905 let project_path = rel_path("project");
12906 let settings_location = SettingsLocation {
12907 worktree_id,
12908 path: project_path.as_ref(),
12909 };
12910
12911 // Test: Project-level disable_ai=true should disable AI for files in that project
12912 cx.update_global::<SettingsStore, _>(|store, cx| {
12913 store
12914 .set_local_settings(
12915 worktree_id,
12916 LocalSettingsPath::InWorktree(project_path.clone()),
12917 LocalSettingsKind::Settings,
12918 Some(r#"{ "disable_ai": true }"#),
12919 cx,
12920 )
12921 .unwrap();
12922 });
12923
12924 cx.update(|cx| {
12925 let settings = DisableAiSettings::get(Some(settings_location), cx);
12926 assert!(
12927 settings.disable_ai,
12928 "Project-level disable_ai=true should disable AI for files in that project"
12929 );
12930 // Global should now also be true since project-level disable_ai is merged into global
12931 assert!(
12932 DisableAiSettings::get_global(cx).disable_ai,
12933 "Global setting should be affected by project-level disable_ai=true"
12934 );
12935 });
12936
12937 // Test: Setting project-level to false should allow AI for that project
12938 cx.update_global::<SettingsStore, _>(|store, cx| {
12939 store
12940 .set_local_settings(
12941 worktree_id,
12942 LocalSettingsPath::InWorktree(project_path.clone()),
12943 LocalSettingsKind::Settings,
12944 Some(r#"{ "disable_ai": false }"#),
12945 cx,
12946 )
12947 .unwrap();
12948 });
12949
12950 cx.update(|cx| {
12951 let settings = DisableAiSettings::get(Some(settings_location), cx);
12952 assert!(
12953 !settings.disable_ai,
12954 "Project-level disable_ai=false should allow AI"
12955 );
12956 // Global should also be false now
12957 assert!(
12958 !DisableAiSettings::get_global(cx).disable_ai,
12959 "Global setting should be false when project-level is false"
12960 );
12961 });
12962
12963 // Test: User-level true + project-level false = AI disabled (saturation)
12964 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12965 cx.update_global::<SettingsStore, _>(|store, cx| {
12966 store.set_user_settings(&disable_true, cx).unwrap();
12967 store
12968 .set_local_settings(
12969 worktree_id,
12970 LocalSettingsPath::InWorktree(project_path.clone()),
12971 LocalSettingsKind::Settings,
12972 Some(r#"{ "disable_ai": false }"#),
12973 cx,
12974 )
12975 .unwrap();
12976 });
12977
12978 cx.update(|cx| {
12979 let settings = DisableAiSettings::get(Some(settings_location), cx);
12980 assert!(
12981 settings.disable_ai,
12982 "Project-level false cannot override user-level true (SaturatingBool)"
12983 );
12984 });
12985 }
12986}