1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
45 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
46 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
47 ToolchainMetadata,
48 language_settings::{LanguageSettings, LanguageSettingsContent},
49 markdown_lang, rust_lang, tree_sitter_typescript,
50};
51use lsp::{
52 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
53 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
54 Uri, WillRenameFiles, notification::DidRenameFiles,
55};
56use parking_lot::Mutex;
57use paths::{config_dir, global_gitignore_path, tasks_file};
58use postage::stream::Stream as _;
59use pretty_assertions::{assert_eq, assert_matches};
60use project::{
61 Event, TaskContexts,
62 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
63 search::{SearchQuery, SearchResult},
64 task_store::{TaskSettingsLocation, TaskStore},
65 *,
66};
67use rand::{Rng as _, rngs::StdRng};
68use serde_json::json;
69use settings::SettingsStore;
70#[cfg(not(windows))]
71use std::os;
72use std::{
73 cell::RefCell,
74 env, mem,
75 num::NonZeroU32,
76 ops::Range,
77 path::{Path, PathBuf},
78 rc::Rc,
79 str::FromStr,
80 sync::{Arc, OnceLock, atomic},
81 task::Poll,
82 time::Duration,
83};
84use sum_tree::SumTree;
85use task::{ResolvedTask, ShellKind, TaskContext};
86use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
87use unindent::Unindent as _;
88use util::{
89 TryFutureExt as _, assert_set_eq, maybe, path,
90 paths::{PathMatcher, PathStyle},
91 rel_path::{RelPath, rel_path},
92 test::{TempTree, marked_text_offsets},
93 uri,
94};
95use worktree::WorktreeModelHandle as _;
96
97#[gpui::test]
98async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
99 cx.executor().allow_parking();
100
101 let (tx, mut rx) = futures::channel::mpsc::unbounded();
102 let _thread = std::thread::spawn(move || {
103 #[cfg(not(target_os = "windows"))]
104 std::fs::metadata("/tmp").unwrap();
105 #[cfg(target_os = "windows")]
106 std::fs::metadata("C:/Windows").unwrap();
107 std::thread::sleep(Duration::from_millis(1000));
108 tx.unbounded_send(1).unwrap();
109 });
110 rx.next().await.unwrap();
111}
112
113#[gpui::test]
114async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
115 cx.executor().allow_parking();
116
117 let io_task = smol::unblock(move || {
118 println!("sleeping on thread {:?}", std::thread::current().id());
119 std::thread::sleep(Duration::from_millis(10));
120 1
121 });
122
123 let task = cx.foreground_executor().spawn(async move {
124 io_task.await;
125 });
126
127 task.await;
128}
129
130#[gpui::test]
131async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
132 cx: &mut gpui::TestAppContext,
133) {
134 init_test(cx);
135
136 let fs = FakeFs::new(cx.executor());
137 fs.insert_tree(
138 path!("/root"),
139 json!({
140 "dir-project": {
141 "src": {
142 "main.rs": "fn main() {}"
143 }
144 },
145 "single-file.rs": "fn helper() {}"
146 }),
147 )
148 .await;
149
150 let project = Project::test(
151 fs,
152 [
153 Path::new(path!("/root/single-file.rs")),
154 Path::new(path!("/root/dir-project")),
155 ],
156 cx,
157 )
158 .await;
159
160 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
161 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
162
163 assert_eq!(
164 ordered_paths,
165 vec![
166 PathBuf::from(path!("/root/dir-project")),
167 PathBuf::from(path!("/root")),
168 ]
169 );
170}
171
172#[gpui::test]
173async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
174 cx: &mut gpui::TestAppContext,
175) {
176 init_test(cx);
177
178 let fs = FakeFs::new(cx.executor());
179 let project = Project::test(fs, [], cx).await;
180
181 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
182 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
183
184 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
185}
186
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
190#[cfg(not(windows))]
191#[gpui::test]
192async fn test_symlinks(cx: &mut gpui::TestAppContext) {
193 init_test(cx);
194 cx.executor().allow_parking();
195
196 let dir = TempTree::new(json!({
197 "root": {
198 "apple": "",
199 "banana": {
200 "carrot": {
201 "date": "",
202 "endive": "",
203 }
204 },
205 "fennel": {
206 "grape": "",
207 }
208 }
209 }));
210
211 let root_link_path = dir.path().join("root_link");
212 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
213 os::unix::fs::symlink(
214 dir.path().join("root/fennel"),
215 dir.path().join("root/finnochio"),
216 )
217 .unwrap();
218
219 let project = Project::test(
220 Arc::new(RealFs::new(None, cx.executor())),
221 [root_link_path.as_ref()],
222 cx,
223 )
224 .await;
225
226 project.update(cx, |project, cx| {
227 let tree = project.worktrees(cx).next().unwrap().read(cx);
228 assert_eq!(tree.file_count(), 5);
229 assert_eq!(
230 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
231 tree.entry_for_path(rel_path("finnochio/grape"))
232 .unwrap()
233 .inode
234 );
235 });
236}
237
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // End-to-end check of .editorconfig handling within one worktree:
    // precedence over .zed/settings.json, nested-directory overrides,
    // the `tab_width` fallback, `off` values, and glob scoping.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            // Worktree-local Zed settings that the .editorconfig above should
            // take precedence over, for the options it sets.
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project can see it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    // Opens a buffer for `path` and returns the fully-resolved language
    // settings that apply to it.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_a = settings_for("a.rs", cx).await;
    let settings_b = settings_for("b/b.rs", cx).await;
    let settings_c = settings_for("c.js", cx).await;
    let settings_d = settings_for("d/d.rs", cx).await;
    let settings_readme = settings_for("README.json", cx).await;
    // .editorconfig overrides .zed/settings.json
    assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
    assert_eq!(settings_a.hard_tabs, true);
    assert_eq!(settings_a.ensure_final_newline_on_save, true);
    assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
    assert_eq!(settings_a.preferred_line_length, 120);

    // .editorconfig in b/ overrides .editorconfig in root
    assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

    // .editorconfig in subdirectory overrides .editorconfig in root
    assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

    // "indent_size" is not set for *.js, so "tab_width" is used instead
    assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

    // When max_line_length is "off", fall back to .zed/settings.json
    assert_eq!(settings_b.preferred_line_length, 64);
    assert_eq!(settings_c.preferred_line_length, 64);

    // README.json should not be affected by the .editorconfig glob "*.rs";
    // it keeps the tab_size from .zed/settings.json
    assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
}
338
339#[gpui::test]
340async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
341 init_test(cx);
342
343 let fs = FakeFs::new(cx.executor());
344 fs.insert_tree(
345 path!("/grandparent"),
346 json!({
347 ".editorconfig": "[*]\nindent_size = 4\n",
348 "parent": {
349 ".editorconfig": "[*.rs]\nindent_size = 2\n",
350 "worktree": {
351 ".editorconfig": "[*.md]\nindent_size = 3\n",
352 "main.rs": "fn main() {}",
353 "README.md": "# README",
354 "other.txt": "other content",
355 }
356 }
357 }),
358 )
359 .await;
360
361 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
362
363 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
364 language_registry.add(rust_lang());
365 language_registry.add(markdown_lang());
366
367 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
368
369 cx.executor().run_until_parked();
370 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
371 let buffer = project
372 .update(cx, |project, cx| {
373 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
374 })
375 .await
376 .unwrap();
377 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
378 };
379
380 let settings_rs = settings_for("main.rs", cx).await;
381 let settings_md = settings_for("README.md", cx).await;
382 let settings_txt = settings_for("other.txt", cx).await;
383
384 // main.rs gets indent_size = 2 from parent's external .editorconfig
385 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
386
387 // README.md gets indent_size = 3 from internal worktree .editorconfig
388 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
389
390 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
391 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
392}
393
394#[gpui::test]
395async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
396 init_test(cx);
397
398 let fs = FakeFs::new(cx.executor());
399 fs.insert_tree(
400 path!("/worktree"),
401 json!({
402 ".editorconfig": "[*]\nindent_size = 99\n",
403 "src": {
404 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
405 "file.rs": "fn main() {}",
406 }
407 }),
408 )
409 .await;
410
411 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
412
413 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
414 language_registry.add(rust_lang());
415
416 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
417
418 cx.executor().run_until_parked();
419
420 let buffer = project
421 .update(cx, |project, cx| {
422 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
423 })
424 .await
425 .unwrap();
426 cx.update(|cx| {
427 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
428 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
429 });
430}
431
432#[gpui::test]
433async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
434 init_test(cx);
435
436 let fs = FakeFs::new(cx.executor());
437 fs.insert_tree(
438 path!("/parent"),
439 json!({
440 ".editorconfig": "[*]\nindent_size = 99\n",
441 "worktree": {
442 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
443 "file.rs": "fn main() {}",
444 }
445 }),
446 )
447 .await;
448
449 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
450
451 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
452 language_registry.add(rust_lang());
453
454 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
455
456 cx.executor().run_until_parked();
457
458 let buffer = project
459 .update(cx, |project, cx| {
460 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
461 })
462 .await
463 .unwrap();
464
465 cx.update(|cx| {
466 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
467
468 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
469 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
470 });
471}
472
473#[gpui::test]
474async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
475 init_test(cx);
476
477 let fs = FakeFs::new(cx.executor());
478 fs.insert_tree(
479 path!("/grandparent"),
480 json!({
481 ".editorconfig": "[*]\nindent_size = 99\n",
482 "parent": {
483 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
484 "worktree": {
485 "file.rs": "fn main() {}",
486 }
487 }
488 }),
489 )
490 .await;
491
492 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
493
494 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
495 language_registry.add(rust_lang());
496
497 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
498
499 cx.executor().run_until_parked();
500
501 let buffer = project
502 .update(cx, |project, cx| {
503 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
504 })
505 .await
506 .unwrap();
507
508 cx.update(|cx| {
509 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
510
511 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
512 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
513 });
514}
515
516#[gpui::test]
517async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
518 init_test(cx);
519
520 let fs = FakeFs::new(cx.executor());
521 fs.insert_tree(
522 path!("/parent"),
523 json!({
524 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
525 "worktree_a": {
526 "file.rs": "fn a() {}",
527 ".editorconfig": "[*]\ninsert_final_newline = true\n",
528 },
529 "worktree_b": {
530 "file.rs": "fn b() {}",
531 ".editorconfig": "[*]\ninsert_final_newline = false\n",
532 }
533 }),
534 )
535 .await;
536
537 let project = Project::test(
538 fs,
539 [
540 path!("/parent/worktree_a").as_ref(),
541 path!("/parent/worktree_b").as_ref(),
542 ],
543 cx,
544 )
545 .await;
546
547 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
548 language_registry.add(rust_lang());
549
550 cx.executor().run_until_parked();
551
552 let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
553 assert_eq!(worktrees.len(), 2);
554
555 for worktree in worktrees {
556 let buffer = project
557 .update(cx, |project, cx| {
558 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
559 })
560 .await
561 .unwrap();
562
563 cx.update(|cx| {
564 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
565
566 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
567 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
568 });
569 }
570}
571
572#[gpui::test]
573async fn test_external_editorconfig_not_loaded_without_internal_config(
574 cx: &mut gpui::TestAppContext,
575) {
576 init_test(cx);
577
578 let fs = FakeFs::new(cx.executor());
579 fs.insert_tree(
580 path!("/parent"),
581 json!({
582 ".editorconfig": "[*]\nindent_size = 99\n",
583 "worktree": {
584 "file.rs": "fn main() {}",
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
591
592 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
593 language_registry.add(rust_lang());
594
595 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
596
597 cx.executor().run_until_parked();
598
599 let buffer = project
600 .update(cx, |project, cx| {
601 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
602 })
603 .await
604 .unwrap();
605
606 cx.update(|cx| {
607 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
608
609 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
610 // because without an internal .editorconfig, external configs are not loaded
611 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
612 });
613}
614
615#[gpui::test]
616async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
617 init_test(cx);
618
619 let fs = FakeFs::new(cx.executor());
620 fs.insert_tree(
621 path!("/parent"),
622 json!({
623 ".editorconfig": "[*]\nindent_size = 4\n",
624 "worktree": {
625 ".editorconfig": "[*]\n",
626 "file.rs": "fn main() {}",
627 }
628 }),
629 )
630 .await;
631
632 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
633
634 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
635 language_registry.add(rust_lang());
636
637 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
638
639 cx.executor().run_until_parked();
640
641 let buffer = project
642 .update(cx, |project, cx| {
643 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
644 })
645 .await
646 .unwrap();
647
648 cx.update(|cx| {
649 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
650
651 // Test initial settings: tab_size = 4 from parent's external .editorconfig
652 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
653 });
654
655 fs.atomic_write(
656 PathBuf::from(path!("/parent/.editorconfig")),
657 "[*]\nindent_size = 8\n".to_owned(),
658 )
659 .await
660 .unwrap();
661
662 cx.executor().run_until_parked();
663
664 let buffer = project
665 .update(cx, |project, cx| {
666 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
667 })
668 .await
669 .unwrap();
670
671 cx.update(|cx| {
672 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
673
674 // Test settings updated: tab_size = 8
675 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
676 });
677}
678
679#[gpui::test]
680async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
681 init_test(cx);
682
683 let fs = FakeFs::new(cx.executor());
684 fs.insert_tree(
685 path!("/parent"),
686 json!({
687 ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
688 "existing_worktree": {
689 ".editorconfig": "[*]\n",
690 "file.rs": "fn a() {}",
691 },
692 "new_worktree": {
693 ".editorconfig": "[*]\n",
694 "file.rs": "fn b() {}",
695 }
696 }),
697 )
698 .await;
699
700 let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;
701
702 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
703 language_registry.add(rust_lang());
704
705 cx.executor().run_until_parked();
706
707 let buffer = project
708 .update(cx, |project, cx| {
709 let id = project.worktrees(cx).next().unwrap().read(cx).id();
710 project.open_buffer((id, rel_path("file.rs")), cx)
711 })
712 .await
713 .unwrap();
714
715 cx.update(|cx| {
716 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();
717
718 // Test existing worktree has tab_size = 7
719 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
720 });
721
722 let (new_worktree, _) = project
723 .update(cx, |project, cx| {
724 project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
725 })
726 .await
727 .unwrap();
728
729 cx.executor().run_until_parked();
730
731 let buffer = project
732 .update(cx, |project, cx| {
733 project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
734 })
735 .await
736 .unwrap();
737
738 cx.update(|cx| {
739 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
740
741 // Verify new worktree also has tab_size = 7 from shared parent editorconfig
742 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
743 });
744}
745
746#[gpui::test]
747async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
748 init_test(cx);
749
750 let fs = FakeFs::new(cx.executor());
751 fs.insert_tree(
752 path!("/parent"),
753 json!({
754 ".editorconfig": "[*]\nindent_size = 6\n",
755 "worktree": {
756 ".editorconfig": "[*]\n",
757 "file.rs": "fn main() {}",
758 }
759 }),
760 )
761 .await;
762
763 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
764
765 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
766 language_registry.add(rust_lang());
767
768 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
769 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
770
771 cx.executor().run_until_parked();
772
773 cx.update(|cx| {
774 let store = cx.global::<SettingsStore>();
775 let (worktree_ids, external_paths, watcher_paths) =
776 store.editorconfig_store.read(cx).test_state();
777
778 // Test external config is loaded
779 assert!(worktree_ids.contains(&worktree_id));
780 assert!(!external_paths.is_empty());
781 assert!(!watcher_paths.is_empty());
782 });
783
784 project.update(cx, |project, cx| {
785 project.remove_worktree(worktree_id, cx);
786 });
787
788 cx.executor().run_until_parked();
789
790 cx.update(|cx| {
791 let store = cx.global::<SettingsStore>();
792 let (worktree_ids, external_paths, watcher_paths) =
793 store.editorconfig_store.read(cx).test_state();
794
795 // Test worktree state, external configs, and watchers all removed
796 assert!(!worktree_ids.contains(&worktree_id));
797 assert!(external_paths.is_empty());
798 assert!(watcher_paths.is_empty());
799 });
800}
801
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // Two worktrees share a single external .editorconfig in their common
    // parent. Removing one worktree must not drop the shared config (or its
    // file watcher) while the other worktree still depends on it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        // Keep a handle to worktree_b so a buffer can be opened in it after
        // worktree_a is removed below.
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Both worktrees are tracked, and they share one external config.
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // worktree_a's state is gone...
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // ...but the shared external config and its watcher survive because
        // worktree_b still uses them.
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // worktree_b still resolves indent_size = 5 from the shared config.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
894
895#[gpui::test]
896async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
897 init_test(cx);
898 cx.update(|cx| {
899 GitHostingProviderRegistry::default_global(cx);
900 git_hosting_providers::init(cx);
901 });
902
903 let fs = FakeFs::new(cx.executor());
904 let str_path = path!("/dir");
905 let path = Path::new(str_path);
906
907 fs.insert_tree(
908 path!("/dir"),
909 json!({
910 ".zed": {
911 "settings.json": r#"{
912 "git_hosting_providers": [
913 {
914 "provider": "gitlab",
915 "base_url": "https://google.com",
916 "name": "foo"
917 }
918 ]
919 }"#
920 },
921 }),
922 )
923 .await;
924
925 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
926 let (_worktree, _) =
927 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
928 cx.executor().run_until_parked();
929
930 cx.update(|cx| {
931 let provider = GitHostingProviderRegistry::global(cx);
932 assert!(
933 provider
934 .list_hosting_providers()
935 .into_iter()
936 .any(|provider| provider.name() == "foo")
937 );
938 });
939
940 fs.atomic_write(
941 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
942 "{}".into(),
943 )
944 .await
945 .unwrap();
946
947 cx.run_until_parked();
948
949 cx.update(|cx| {
950 let provider = GitHostingProviderRegistry::global(cx);
951 assert!(
952 !provider
953 .list_hosting_providers()
954 .into_iter()
955 .any(|provider| provider.name() == "foo")
956 );
957 });
958}
959
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Exercises two project-local configuration mechanisms together:
    // - `.zed/settings.json` files scoping language settings to directories;
    // - `.zed/tasks.json` files contributing worktree-local tasks, plus the
    //   global tasks file, and the ordering of the combined task list.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree's (default) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies tasks contributed by the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // a/ inherits the worktree-root settings; b/ has its own override.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute tasks; the task from the deeper
    // directory (b/.zed) sorts before the worktree-root one.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the worktree-root task as most recently scheduled, and add a task
    // through the global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task")
    ;
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled worktree-root task now sorts first; the global
    // task (with its env vars resolved) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1162
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped to `true` by the subscription below once the expected toast is seen.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast must carry a stable notification id, mention the offending
        // variable, and link to the tasks documentation.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    // Let the FS event propagate and the tasks file be re-parsed.
    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1220
1221#[gpui::test]
1222async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1223 init_test(cx);
1224 TaskStore::init(None);
1225
1226 let fs = FakeFs::new(cx.executor());
1227 fs.insert_tree(
1228 path!("/dir"),
1229 json!({
1230 ".zed": {
1231 "tasks.json": r#"[{
1232 "label": "test worktree root",
1233 "command": "echo $ZED_WORKTREE_ROOT"
1234 }]"#,
1235 },
1236 "a": {
1237 "a.rs": "fn a() {\n A\n}"
1238 },
1239 }),
1240 )
1241 .await;
1242
1243 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1244 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1245
1246 cx.executor().run_until_parked();
1247 let worktree_id = cx.update(|cx| {
1248 project.update(cx, |project, cx| {
1249 project.worktrees(cx).next().unwrap().read(cx).id()
1250 })
1251 });
1252
1253 let active_non_worktree_item_tasks = cx
1254 .update(|cx| {
1255 get_all_tasks(
1256 &project,
1257 Arc::new(TaskContexts {
1258 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1259 active_worktree_context: None,
1260 other_worktree_contexts: Vec::new(),
1261 lsp_task_sources: HashMap::default(),
1262 latest_selection: None,
1263 }),
1264 cx,
1265 )
1266 })
1267 .await;
1268 assert!(
1269 active_non_worktree_item_tasks.is_empty(),
1270 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1271 );
1272
1273 let active_worktree_tasks = cx
1274 .update(|cx| {
1275 get_all_tasks(
1276 &project,
1277 Arc::new(TaskContexts {
1278 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1279 active_worktree_context: Some((worktree_id, {
1280 let mut worktree_context = TaskContext::default();
1281 worktree_context
1282 .task_variables
1283 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1284 worktree_context
1285 })),
1286 other_worktree_contexts: Vec::new(),
1287 lsp_task_sources: HashMap::default(),
1288 latest_selection: None,
1289 }),
1290 cx,
1291 )
1292 })
1293 .await;
1294 assert_eq!(
1295 active_worktree_tasks
1296 .into_iter()
1297 .map(|(source_kind, task)| {
1298 let resolved = task.resolved;
1299 (source_kind, resolved.command.unwrap())
1300 })
1301 .collect::<Vec<_>>(),
1302 vec![(
1303 TaskSourceKind::Worktree {
1304 id: worktree_id,
1305 directory_in_worktree: rel_path(".zed").into(),
1306 id_base: "local worktree tasks from directory \".zed\"".into(),
1307 },
1308 "echo /dir".to_string(),
1309 )]
1310 );
1311}
1312
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a Python project root is the nearest ancestor
    // directory that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up to `depth` ancestors of `path`, returning the first one
        // where `pyproject.toml` exists (queried through `delegate`).
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sub-projects, each rooted by its own `pyproject.toml` and `.venv`,
    // inside a single worktree.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
            {
                "languages": {
                    "Python": {
                        "language_servers": ["ty"]
                    }
                }
            }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no toolchain distinguishes the two sub-projects yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's `pyproject.toml`.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Activating a toolchain for project-b should split it off into its own
    // server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1514
// End-to-end test of language-server lifecycle management: starting servers
// lazily, routing open/change/save/close notifications to the right server,
// migrating a buffer between servers when a rename changes its language, and
// restarting servers.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers either.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's change arrives at the rust server; the TOML edit
    // is never reported to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename is modeled as close-at-old-path followed by open-at-new-path.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic from the rust server so we can verify below that it
    // is cleared once the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            true,
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1918
1919#[gpui::test]
1920async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1921 init_test(cx);
1922
1923 let settings_json_contents = json!({
1924 "languages": {
1925 "Rust": {
1926 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1927 }
1928 },
1929 "lsp": {
1930 "my_fake_lsp": {
1931 "binary": {
1932 // file exists, so this is treated as a relative path
1933 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1934 }
1935 },
1936 "lsp_on_path": {
1937 "binary": {
1938 // file doesn't exist, so it will fall back on PATH env var
1939 "path": path!("lsp_on_path.exe").to_string(),
1940 }
1941 }
1942 },
1943 });
1944
1945 let fs = FakeFs::new(cx.executor());
1946 fs.insert_tree(
1947 path!("/the-root"),
1948 json!({
1949 ".zed": {
1950 "settings.json": settings_json_contents.to_string(),
1951 },
1952 ".relative_path": {
1953 "to": {
1954 "my_fake_lsp.exe": "",
1955 },
1956 },
1957 "src": {
1958 "main.rs": "",
1959 }
1960 }),
1961 )
1962 .await;
1963
1964 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1965 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1966 language_registry.add(rust_lang());
1967
1968 let mut my_fake_lsp = language_registry.register_fake_lsp(
1969 "Rust",
1970 FakeLspAdapter {
1971 name: "my_fake_lsp",
1972 ..Default::default()
1973 },
1974 );
1975 let mut lsp_on_path = language_registry.register_fake_lsp(
1976 "Rust",
1977 FakeLspAdapter {
1978 name: "lsp_on_path",
1979 ..Default::default()
1980 },
1981 );
1982
1983 cx.run_until_parked();
1984
1985 // Start the language server by opening a buffer with a compatible file extension.
1986 project
1987 .update(cx, |project, cx| {
1988 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1989 })
1990 .await
1991 .unwrap();
1992
1993 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1994 assert_eq!(
1995 lsp_path.to_string_lossy(),
1996 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1997 );
1998
1999 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2000 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2001}
2002
2003#[gpui::test]
2004async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2005 init_test(cx);
2006
2007 let settings_json_contents = json!({
2008 "languages": {
2009 "Rust": {
2010 "language_servers": ["tilde_lsp"]
2011 }
2012 },
2013 "lsp": {
2014 "tilde_lsp": {
2015 "binary": {
2016 "path": "~/.local/bin/rust-analyzer",
2017 }
2018 }
2019 },
2020 });
2021
2022 let fs = FakeFs::new(cx.executor());
2023 fs.insert_tree(
2024 path!("/root"),
2025 json!({
2026 ".zed": {
2027 "settings.json": settings_json_contents.to_string(),
2028 },
2029 "src": {
2030 "main.rs": "fn main() {}",
2031 }
2032 }),
2033 )
2034 .await;
2035
2036 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2037 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2038 language_registry.add(rust_lang());
2039
2040 let mut tilde_lsp = language_registry.register_fake_lsp(
2041 "Rust",
2042 FakeLspAdapter {
2043 name: "tilde_lsp",
2044 ..Default::default()
2045 },
2046 );
2047 cx.run_until_parked();
2048
2049 project
2050 .update(cx, |project, cx| {
2051 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2052 })
2053 .await
2054 .unwrap();
2055
2056 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2057 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2058 assert_eq!(
2059 lsp_path, expected_path,
2060 "Tilde path should expand to home directory"
2061 );
2062}
2063
// A filesystem rescan event for a watched path should be forwarded to the
// language server as a plain `CHANGED` file event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the server by opening a Rust buffer.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Register a watcher for `Cargo.lock` and collect all file events the
    // server is notified about.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered just from registering the watcher.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // Simulate a rescan event for the watched file.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces to the server as a `CHANGED` event.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2154
2155#[gpui::test]
2156async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2157 init_test(cx);
2158
2159 let fs = FakeFs::new(cx.executor());
2160 fs.insert_tree(
2161 path!("/the-root"),
2162 json!({
2163 ".gitignore": "target\n",
2164 "Cargo.lock": "",
2165 "src": {
2166 "a.rs": "",
2167 "b.rs": "",
2168 },
2169 "target": {
2170 "x": {
2171 "out": {
2172 "x.rs": ""
2173 }
2174 },
2175 "y": {
2176 "out": {
2177 "y.rs": "",
2178 }
2179 },
2180 "z": {
2181 "out": {
2182 "z.rs": ""
2183 }
2184 }
2185 }
2186 }),
2187 )
2188 .await;
2189 fs.insert_tree(
2190 path!("/the-registry"),
2191 json!({
2192 "dep1": {
2193 "src": {
2194 "dep1.rs": "",
2195 }
2196 },
2197 "dep2": {
2198 "src": {
2199 "dep2.rs": "",
2200 }
2201 },
2202 }),
2203 )
2204 .await;
2205 fs.insert_tree(
2206 path!("/the/stdlib"),
2207 json!({
2208 "LICENSE": "",
2209 "src": {
2210 "string.rs": "",
2211 }
2212 }),
2213 )
2214 .await;
2215
2216 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2217 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2218 (project.languages().clone(), project.lsp_store())
2219 });
2220 language_registry.add(rust_lang());
2221 let mut fake_servers = language_registry.register_fake_lsp(
2222 "Rust",
2223 FakeLspAdapter {
2224 name: "the-language-server",
2225 ..Default::default()
2226 },
2227 );
2228
2229 cx.executor().run_until_parked();
2230
2231 // Start the language server by opening a buffer with a compatible file extension.
2232 project
2233 .update(cx, |project, cx| {
2234 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2235 })
2236 .await
2237 .unwrap();
2238
2239 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2240 project.update(cx, |project, cx| {
2241 let worktree = project.worktrees(cx).next().unwrap();
2242 assert_eq!(
2243 worktree
2244 .read(cx)
2245 .snapshot()
2246 .entries(true, 0)
2247 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2248 .collect::<Vec<_>>(),
2249 &[
2250 ("", false),
2251 (".gitignore", false),
2252 ("Cargo.lock", false),
2253 ("src", false),
2254 ("src/a.rs", false),
2255 ("src/b.rs", false),
2256 ("target", true),
2257 ]
2258 );
2259 });
2260
2261 let prev_read_dir_count = fs.read_dir_call_count();
2262
2263 let fake_server = fake_servers.next().await.unwrap();
2264 cx.executor().run_until_parked();
2265 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2266 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2267 id
2268 });
2269
2270 // Simulate jumping to a definition in a dependency outside of the worktree.
2271 let _out_of_worktree_buffer = project
2272 .update(cx, |project, cx| {
2273 project.open_local_buffer_via_lsp(
2274 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2275 server_id,
2276 cx,
2277 )
2278 })
2279 .await
2280 .unwrap();
2281
2282 // Keep track of the FS events reported to the language server.
2283 let file_changes = Arc::new(Mutex::new(Vec::new()));
2284 fake_server
2285 .request::<lsp::request::RegisterCapability>(
2286 lsp::RegistrationParams {
2287 registrations: vec![lsp::Registration {
2288 id: Default::default(),
2289 method: "workspace/didChangeWatchedFiles".to_string(),
2290 register_options: serde_json::to_value(
2291 lsp::DidChangeWatchedFilesRegistrationOptions {
2292 watchers: vec![
2293 lsp::FileSystemWatcher {
2294 glob_pattern: lsp::GlobPattern::String(
2295 path!("/the-root/Cargo.toml").to_string(),
2296 ),
2297 kind: None,
2298 },
2299 lsp::FileSystemWatcher {
2300 glob_pattern: lsp::GlobPattern::String(
2301 path!("/the-root/src/*.{rs,c}").to_string(),
2302 ),
2303 kind: None,
2304 },
2305 lsp::FileSystemWatcher {
2306 glob_pattern: lsp::GlobPattern::String(
2307 path!("/the-root/target/y/**/*.rs").to_string(),
2308 ),
2309 kind: None,
2310 },
2311 lsp::FileSystemWatcher {
2312 glob_pattern: lsp::GlobPattern::String(
2313 path!("/the/stdlib/src/**/*.rs").to_string(),
2314 ),
2315 kind: None,
2316 },
2317 lsp::FileSystemWatcher {
2318 glob_pattern: lsp::GlobPattern::String(
2319 path!("**/Cargo.lock").to_string(),
2320 ),
2321 kind: None,
2322 },
2323 ],
2324 },
2325 )
2326 .ok(),
2327 }],
2328 },
2329 DEFAULT_LSP_REQUEST_TIMEOUT,
2330 )
2331 .await
2332 .into_response()
2333 .unwrap();
2334 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2335 let file_changes = file_changes.clone();
2336 move |params, _| {
2337 let mut file_changes = file_changes.lock();
2338 file_changes.extend(params.changes);
2339 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2340 }
2341 });
2342
2343 cx.executor().run_until_parked();
2344 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2345 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2346
2347 let mut new_watched_paths = fs.watched_paths();
2348 new_watched_paths.retain(|path| {
2349 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2350 });
2351 assert_eq!(
2352 &new_watched_paths,
2353 &[
2354 Path::new(path!("/the-root")),
2355 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2356 Path::new(path!("/the/stdlib/src"))
2357 ]
2358 );
2359
2360 // Now the language server has asked us to watch an ignored directory path,
2361 // so we recursively load it.
2362 project.update(cx, |project, cx| {
2363 let worktree = project.visible_worktrees(cx).next().unwrap();
2364 assert_eq!(
2365 worktree
2366 .read(cx)
2367 .snapshot()
2368 .entries(true, 0)
2369 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2370 .collect::<Vec<_>>(),
2371 &[
2372 ("", false),
2373 (".gitignore", false),
2374 ("Cargo.lock", false),
2375 ("src", false),
2376 ("src/a.rs", false),
2377 ("src/b.rs", false),
2378 ("target", true),
2379 ("target/x", true),
2380 ("target/y", true),
2381 ("target/y/out", true),
2382 ("target/y/out/y.rs", true),
2383 ("target/z", true),
2384 ]
2385 );
2386 });
2387
2388 // Perform some file system mutations, two of which match the watched patterns,
2389 // and one of which does not.
2390 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2391 .await
2392 .unwrap();
2393 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2394 .await
2395 .unwrap();
2396 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2397 .await
2398 .unwrap();
2399 fs.create_file(
2400 path!("/the-root/target/x/out/x2.rs").as_ref(),
2401 Default::default(),
2402 )
2403 .await
2404 .unwrap();
2405 fs.create_file(
2406 path!("/the-root/target/y/out/y2.rs").as_ref(),
2407 Default::default(),
2408 )
2409 .await
2410 .unwrap();
2411 fs.save(
2412 path!("/the-root/Cargo.lock").as_ref(),
2413 &"".into(),
2414 Default::default(),
2415 )
2416 .await
2417 .unwrap();
2418 fs.save(
2419 path!("/the-stdlib/LICENSE").as_ref(),
2420 &"".into(),
2421 Default::default(),
2422 )
2423 .await
2424 .unwrap();
2425 fs.save(
2426 path!("/the/stdlib/src/string.rs").as_ref(),
2427 &"".into(),
2428 Default::default(),
2429 )
2430 .await
2431 .unwrap();
2432
2433 // The language server receives events for the FS mutations that match its watch patterns.
2434 cx.executor().run_until_parked();
2435 assert_eq!(
2436 &*file_changes.lock(),
2437 &[
2438 lsp::FileEvent {
2439 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2440 typ: lsp::FileChangeType::CHANGED,
2441 },
2442 lsp::FileEvent {
2443 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2444 typ: lsp::FileChangeType::DELETED,
2445 },
2446 lsp::FileEvent {
2447 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2448 typ: lsp::FileChangeType::CREATED,
2449 },
2450 lsp::FileEvent {
2451 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2452 typ: lsp::FileChangeType::CREATED,
2453 },
2454 lsp::FileEvent {
2455 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2456 typ: lsp::FileChangeType::CHANGED,
2457 },
2458 ]
2459 );
2460}
2461
2462#[gpui::test]
2463async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2464 init_test(cx);
2465
2466 let fs = FakeFs::new(cx.executor());
2467 fs.insert_tree(
2468 path!("/dir"),
2469 json!({
2470 "a.rs": "let a = 1;",
2471 "b.rs": "let b = 2;"
2472 }),
2473 )
2474 .await;
2475
2476 let project = Project::test(
2477 fs,
2478 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2479 cx,
2480 )
2481 .await;
2482 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2483
2484 let buffer_a = project
2485 .update(cx, |project, cx| {
2486 project.open_local_buffer(path!("/dir/a.rs"), cx)
2487 })
2488 .await
2489 .unwrap();
2490 let buffer_b = project
2491 .update(cx, |project, cx| {
2492 project.open_local_buffer(path!("/dir/b.rs"), cx)
2493 })
2494 .await
2495 .unwrap();
2496
2497 lsp_store.update(cx, |lsp_store, cx| {
2498 lsp_store
2499 .update_diagnostics(
2500 LanguageServerId(0),
2501 lsp::PublishDiagnosticsParams {
2502 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2503 version: None,
2504 diagnostics: vec![lsp::Diagnostic {
2505 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2506 severity: Some(lsp::DiagnosticSeverity::ERROR),
2507 message: "error 1".to_string(),
2508 ..Default::default()
2509 }],
2510 },
2511 None,
2512 DiagnosticSourceKind::Pushed,
2513 &[],
2514 cx,
2515 )
2516 .unwrap();
2517 lsp_store
2518 .update_diagnostics(
2519 LanguageServerId(0),
2520 lsp::PublishDiagnosticsParams {
2521 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2522 version: None,
2523 diagnostics: vec![lsp::Diagnostic {
2524 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2525 severity: Some(DiagnosticSeverity::WARNING),
2526 message: "error 2".to_string(),
2527 ..Default::default()
2528 }],
2529 },
2530 None,
2531 DiagnosticSourceKind::Pushed,
2532 &[],
2533 cx,
2534 )
2535 .unwrap();
2536 });
2537
2538 buffer_a.update(cx, |buffer, _| {
2539 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2540 assert_eq!(
2541 chunks
2542 .iter()
2543 .map(|(s, d)| (s.as_str(), *d))
2544 .collect::<Vec<_>>(),
2545 &[
2546 ("let ", None),
2547 ("a", Some(DiagnosticSeverity::ERROR)),
2548 (" = 1;", None),
2549 ]
2550 );
2551 });
2552 buffer_b.update(cx, |buffer, _| {
2553 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2554 assert_eq!(
2555 chunks
2556 .iter()
2557 .map(|(s, d)| (s.as_str(), *d))
2558 .collect::<Vec<_>>(),
2559 &[
2560 ("let ", None),
2561 ("b", Some(DiagnosticSeverity::WARNING)),
2562 (" = 2;", None),
2563 ]
2564 );
2565 });
2566}
2567
2568#[gpui::test]
2569async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2570 init_test(cx);
2571
2572 let fs = FakeFs::new(cx.executor());
2573 fs.insert_tree(
2574 path!("/root"),
2575 json!({
2576 "dir": {
2577 ".git": {
2578 "HEAD": "ref: refs/heads/main",
2579 },
2580 ".gitignore": "b.rs",
2581 "a.rs": "let a = 1;",
2582 "b.rs": "let b = 2;",
2583 },
2584 "other.rs": "let b = c;"
2585 }),
2586 )
2587 .await;
2588
2589 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2590 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2591 let (worktree, _) = project
2592 .update(cx, |project, cx| {
2593 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2594 })
2595 .await
2596 .unwrap();
2597 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2598
2599 let (worktree, _) = project
2600 .update(cx, |project, cx| {
2601 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2602 })
2603 .await
2604 .unwrap();
2605 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2606
2607 let server_id = LanguageServerId(0);
2608 lsp_store.update(cx, |lsp_store, cx| {
2609 lsp_store
2610 .update_diagnostics(
2611 server_id,
2612 lsp::PublishDiagnosticsParams {
2613 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2614 version: None,
2615 diagnostics: vec![lsp::Diagnostic {
2616 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2617 severity: Some(lsp::DiagnosticSeverity::ERROR),
2618 message: "unused variable 'b'".to_string(),
2619 ..Default::default()
2620 }],
2621 },
2622 None,
2623 DiagnosticSourceKind::Pushed,
2624 &[],
2625 cx,
2626 )
2627 .unwrap();
2628 lsp_store
2629 .update_diagnostics(
2630 server_id,
2631 lsp::PublishDiagnosticsParams {
2632 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2633 version: None,
2634 diagnostics: vec![lsp::Diagnostic {
2635 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2636 severity: Some(lsp::DiagnosticSeverity::ERROR),
2637 message: "unknown variable 'c'".to_string(),
2638 ..Default::default()
2639 }],
2640 },
2641 None,
2642 DiagnosticSourceKind::Pushed,
2643 &[],
2644 cx,
2645 )
2646 .unwrap();
2647 });
2648
2649 let main_ignored_buffer = project
2650 .update(cx, |project, cx| {
2651 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2652 })
2653 .await
2654 .unwrap();
2655 main_ignored_buffer.update(cx, |buffer, _| {
2656 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2657 assert_eq!(
2658 chunks
2659 .iter()
2660 .map(|(s, d)| (s.as_str(), *d))
2661 .collect::<Vec<_>>(),
2662 &[
2663 ("let ", None),
2664 ("b", Some(DiagnosticSeverity::ERROR)),
2665 (" = 2;", None),
2666 ],
2667 "Gigitnored buffers should still get in-buffer diagnostics",
2668 );
2669 });
2670 let other_buffer = project
2671 .update(cx, |project, cx| {
2672 project.open_buffer((other_worktree_id, rel_path("")), cx)
2673 })
2674 .await
2675 .unwrap();
2676 other_buffer.update(cx, |buffer, _| {
2677 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2678 assert_eq!(
2679 chunks
2680 .iter()
2681 .map(|(s, d)| (s.as_str(), *d))
2682 .collect::<Vec<_>>(),
2683 &[
2684 ("let b = ", None),
2685 ("c", Some(DiagnosticSeverity::ERROR)),
2686 (";", None),
2687 ],
2688 "Buffers from hidden projects should still get in-buffer diagnostics"
2689 );
2690 });
2691
2692 project.update(cx, |project, cx| {
2693 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2694 assert_eq!(
2695 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2696 vec![(
2697 ProjectPath {
2698 worktree_id: main_worktree_id,
2699 path: rel_path("b.rs").into(),
2700 },
2701 server_id,
2702 DiagnosticSummary {
2703 error_count: 1,
2704 warning_count: 0,
2705 }
2706 )]
2707 );
2708 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2709 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2710 });
2711}
2712
// Verifies that a language server's disk-based-diagnostics progress token is
// translated into DiskBasedDiagnosticsStarted/Finished project events, that
// published diagnostics surface as DiagnosticsUpdated and in the buffer, and
// that re-publishing identical empty diagnostics produces no duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the registered token maps to the
    // "disk-based diagnostics started" project event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for `a.rs`; the project should announce the path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress on the token maps to the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event for the identical (still empty) diagnostics.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2848
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not wedge the "running diagnostics" state: the
// old server's unfinished progress is discarded, and the replacement server's
// progress lifecycle drives the project events from a clean slate.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), true, cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, and its replacement gets a new id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2950
2951#[gpui::test]
2952async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2953 init_test(cx);
2954
2955 let fs = FakeFs::new(cx.executor());
2956 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2957
2958 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2959
2960 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2961 language_registry.add(rust_lang());
2962 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2963
2964 let (buffer, _) = project
2965 .update(cx, |project, cx| {
2966 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2967 })
2968 .await
2969 .unwrap();
2970
2971 // Publish diagnostics
2972 let fake_server = fake_servers.next().await.unwrap();
2973 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2974 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2975 version: None,
2976 diagnostics: vec![lsp::Diagnostic {
2977 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2978 severity: Some(lsp::DiagnosticSeverity::ERROR),
2979 message: "the message".to_string(),
2980 ..Default::default()
2981 }],
2982 });
2983
2984 cx.executor().run_until_parked();
2985 buffer.update(cx, |buffer, _| {
2986 assert_eq!(
2987 buffer
2988 .snapshot()
2989 .diagnostics_in_range::<_, usize>(0..1, false)
2990 .map(|entry| entry.diagnostic.message.clone())
2991 .collect::<Vec<_>>(),
2992 ["the message".to_string()]
2993 );
2994 });
2995 project.update(cx, |project, cx| {
2996 assert_eq!(
2997 project.diagnostic_summary(false, cx),
2998 DiagnosticSummary {
2999 error_count: 1,
3000 warning_count: 0,
3001 }
3002 );
3003 });
3004
3005 project.update(cx, |project, cx| {
3006 project.restart_language_servers_for_buffers(
3007 vec![buffer.clone()],
3008 HashSet::default(),
3009 true,
3010 cx,
3011 );
3012 });
3013
3014 // The diagnostics are cleared.
3015 cx.executor().run_until_parked();
3016 buffer.update(cx, |buffer, _| {
3017 assert_eq!(
3018 buffer
3019 .snapshot()
3020 .diagnostics_in_range::<_, usize>(0..1, false)
3021 .map(|entry| entry.diagnostic.message.clone())
3022 .collect::<Vec<_>>(),
3023 Vec::<String>::new(),
3024 );
3025 });
3026 project.update(cx, |project, cx| {
3027 assert_eq!(
3028 project.diagnostic_summary(false, cx),
3029 DiagnosticSummary {
3030 error_count: 0,
3031 warning_count: 0,
3032 }
3033 );
3034 });
3035}
3036
3037#[gpui::test]
3038async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3039 init_test(cx);
3040
3041 let fs = FakeFs::new(cx.executor());
3042 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3043
3044 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3045 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3046
3047 language_registry.add(rust_lang());
3048 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3049
3050 let (buffer, _handle) = project
3051 .update(cx, |project, cx| {
3052 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3053 })
3054 .await
3055 .unwrap();
3056
3057 // Before restarting the server, report diagnostics with an unknown buffer version.
3058 let fake_server = fake_servers.next().await.unwrap();
3059 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3060 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3061 version: Some(10000),
3062 diagnostics: Vec::new(),
3063 });
3064 cx.executor().run_until_parked();
3065 project.update(cx, |project, cx| {
3066 project.restart_language_servers_for_buffers(
3067 vec![buffer.clone()],
3068 HashSet::default(),
3069 true,
3070 cx,
3071 );
3072 });
3073
3074 let mut fake_server = fake_servers.next().await.unwrap();
3075 let notification = fake_server
3076 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3077 .await
3078 .text_document;
3079 assert_eq!(notification.version, 0);
3080}
3081
3082#[gpui::test]
3083async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
3084 init_test(cx);
3085
3086 let progress_token = "the-progress-token";
3087
3088 let fs = FakeFs::new(cx.executor());
3089 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3090
3091 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3092
3093 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3094 language_registry.add(rust_lang());
3095 let mut fake_servers = language_registry.register_fake_lsp(
3096 "Rust",
3097 FakeLspAdapter {
3098 name: "the-language-server",
3099 disk_based_diagnostics_sources: vec!["disk".into()],
3100 disk_based_diagnostics_progress_token: Some(progress_token.into()),
3101 ..Default::default()
3102 },
3103 );
3104
3105 let (buffer, _handle) = project
3106 .update(cx, |project, cx| {
3107 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3108 })
3109 .await
3110 .unwrap();
3111
3112 // Simulate diagnostics starting to update.
3113 let mut fake_server = fake_servers.next().await.unwrap();
3114 fake_server
3115 .start_progress_with(
3116 "another-token",
3117 lsp::WorkDoneProgressBegin {
3118 cancellable: Some(false),
3119 ..Default::default()
3120 },
3121 DEFAULT_LSP_REQUEST_TIMEOUT,
3122 )
3123 .await;
3124 // Ensure progress notification is fully processed before starting the next one
3125 cx.executor().run_until_parked();
3126
3127 fake_server
3128 .start_progress_with(
3129 progress_token,
3130 lsp::WorkDoneProgressBegin {
3131 cancellable: Some(true),
3132 ..Default::default()
3133 },
3134 DEFAULT_LSP_REQUEST_TIMEOUT,
3135 )
3136 .await;
3137 // Ensure progress notification is fully processed before cancelling
3138 cx.executor().run_until_parked();
3139
3140 project.update(cx, |project, cx| {
3141 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
3142 });
3143 cx.executor().run_until_parked();
3144
3145 let cancel_notification = fake_server
3146 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
3147 .await;
3148 assert_eq!(
3149 cancel_notification.token,
3150 NumberOrString::String(progress_token.into())
3151 );
3152}
3153
// Verifies that toggling `enable_language_server` in per-language settings
// starts and stops only the affected server: disabling Rust exits the Rust
// server (leaving JS alone), and flipping the two settings at once restarts
// Rust while shutting down JS.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3271
3272#[gpui::test(iterations = 3)]
3273async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3274 init_test(cx);
3275
3276 let text = "
3277 fn a() { A }
3278 fn b() { BB }
3279 fn c() { CCC }
3280 "
3281 .unindent();
3282
3283 let fs = FakeFs::new(cx.executor());
3284 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3285
3286 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3287 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3288
3289 language_registry.add(rust_lang());
3290 let mut fake_servers = language_registry.register_fake_lsp(
3291 "Rust",
3292 FakeLspAdapter {
3293 disk_based_diagnostics_sources: vec!["disk".into()],
3294 ..Default::default()
3295 },
3296 );
3297
3298 let buffer = project
3299 .update(cx, |project, cx| {
3300 project.open_local_buffer(path!("/dir/a.rs"), cx)
3301 })
3302 .await
3303 .unwrap();
3304
3305 let _handle = project.update(cx, |project, cx| {
3306 project.register_buffer_with_language_servers(&buffer, cx)
3307 });
3308
3309 let mut fake_server = fake_servers.next().await.unwrap();
3310 let open_notification = fake_server
3311 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3312 .await;
3313
3314 // Edit the buffer, moving the content down
3315 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3316 let change_notification_1 = fake_server
3317 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3318 .await;
3319 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3320
3321 // Report some diagnostics for the initial version of the buffer
3322 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3323 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3324 version: Some(open_notification.text_document.version),
3325 diagnostics: vec![
3326 lsp::Diagnostic {
3327 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3328 severity: Some(DiagnosticSeverity::ERROR),
3329 message: "undefined variable 'A'".to_string(),
3330 source: Some("disk".to_string()),
3331 ..Default::default()
3332 },
3333 lsp::Diagnostic {
3334 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3335 severity: Some(DiagnosticSeverity::ERROR),
3336 message: "undefined variable 'BB'".to_string(),
3337 source: Some("disk".to_string()),
3338 ..Default::default()
3339 },
3340 lsp::Diagnostic {
3341 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3342 severity: Some(DiagnosticSeverity::ERROR),
3343 source: Some("disk".to_string()),
3344 message: "undefined variable 'CCC'".to_string(),
3345 ..Default::default()
3346 },
3347 ],
3348 });
3349
3350 // The diagnostics have moved down since they were created.
3351 cx.executor().run_until_parked();
3352 buffer.update(cx, |buffer, _| {
3353 assert_eq!(
3354 buffer
3355 .snapshot()
3356 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3357 .collect::<Vec<_>>(),
3358 &[
3359 DiagnosticEntry {
3360 range: Point::new(3, 9)..Point::new(3, 11),
3361 diagnostic: Diagnostic {
3362 source: Some("disk".into()),
3363 severity: DiagnosticSeverity::ERROR,
3364 message: "undefined variable 'BB'".to_string(),
3365 is_disk_based: true,
3366 group_id: 1,
3367 is_primary: true,
3368 source_kind: DiagnosticSourceKind::Pushed,
3369 ..Diagnostic::default()
3370 },
3371 },
3372 DiagnosticEntry {
3373 range: Point::new(4, 9)..Point::new(4, 12),
3374 diagnostic: Diagnostic {
3375 source: Some("disk".into()),
3376 severity: DiagnosticSeverity::ERROR,
3377 message: "undefined variable 'CCC'".to_string(),
3378 is_disk_based: true,
3379 group_id: 2,
3380 is_primary: true,
3381 source_kind: DiagnosticSourceKind::Pushed,
3382 ..Diagnostic::default()
3383 }
3384 }
3385 ]
3386 );
3387 assert_eq!(
3388 chunks_with_diagnostics(buffer, 0..buffer.len()),
3389 [
3390 ("\n\nfn a() { ".to_string(), None),
3391 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3392 (" }\nfn b() { ".to_string(), None),
3393 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3394 (" }\nfn c() { ".to_string(), None),
3395 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3396 (" }\n".to_string(), None),
3397 ]
3398 );
3399 assert_eq!(
3400 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3401 [
3402 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3403 (" }\nfn c() { ".to_string(), None),
3404 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3405 ]
3406 );
3407 });
3408
3409 // Ensure overlapping diagnostics are highlighted correctly.
3410 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3411 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3412 version: Some(open_notification.text_document.version),
3413 diagnostics: vec![
3414 lsp::Diagnostic {
3415 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3416 severity: Some(DiagnosticSeverity::ERROR),
3417 message: "undefined variable 'A'".to_string(),
3418 source: Some("disk".to_string()),
3419 ..Default::default()
3420 },
3421 lsp::Diagnostic {
3422 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3423 severity: Some(DiagnosticSeverity::WARNING),
3424 message: "unreachable statement".to_string(),
3425 source: Some("disk".to_string()),
3426 ..Default::default()
3427 },
3428 ],
3429 });
3430
3431 cx.executor().run_until_parked();
3432 buffer.update(cx, |buffer, _| {
3433 assert_eq!(
3434 buffer
3435 .snapshot()
3436 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3437 .collect::<Vec<_>>(),
3438 &[
3439 DiagnosticEntry {
3440 range: Point::new(2, 9)..Point::new(2, 12),
3441 diagnostic: Diagnostic {
3442 source: Some("disk".into()),
3443 severity: DiagnosticSeverity::WARNING,
3444 message: "unreachable statement".to_string(),
3445 is_disk_based: true,
3446 group_id: 4,
3447 is_primary: true,
3448 source_kind: DiagnosticSourceKind::Pushed,
3449 ..Diagnostic::default()
3450 }
3451 },
3452 DiagnosticEntry {
3453 range: Point::new(2, 9)..Point::new(2, 10),
3454 diagnostic: Diagnostic {
3455 source: Some("disk".into()),
3456 severity: DiagnosticSeverity::ERROR,
3457 message: "undefined variable 'A'".to_string(),
3458 is_disk_based: true,
3459 group_id: 3,
3460 is_primary: true,
3461 source_kind: DiagnosticSourceKind::Pushed,
3462 ..Diagnostic::default()
3463 },
3464 }
3465 ]
3466 );
3467 assert_eq!(
3468 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3469 [
3470 ("fn a() { ".to_string(), None),
3471 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3472 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3473 ("\n".to_string(), None),
3474 ]
3475 );
3476 assert_eq!(
3477 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3478 [
3479 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3480 ("\n".to_string(), None),
3481 ]
3482 );
3483 });
3484
3485 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3486 // changes since the last save.
3487 buffer.update(cx, |buffer, cx| {
3488 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3489 buffer.edit(
3490 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3491 None,
3492 cx,
3493 );
3494 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3495 });
3496 let change_notification_2 = fake_server
3497 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3498 .await;
3499 assert!(
3500 change_notification_2.text_document.version > change_notification_1.text_document.version
3501 );
3502
3503 // Handle out-of-order diagnostics
3504 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3505 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3506 version: Some(change_notification_2.text_document.version),
3507 diagnostics: vec![
3508 lsp::Diagnostic {
3509 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3510 severity: Some(DiagnosticSeverity::ERROR),
3511 message: "undefined variable 'BB'".to_string(),
3512 source: Some("disk".to_string()),
3513 ..Default::default()
3514 },
3515 lsp::Diagnostic {
3516 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3517 severity: Some(DiagnosticSeverity::WARNING),
3518 message: "undefined variable 'A'".to_string(),
3519 source: Some("disk".to_string()),
3520 ..Default::default()
3521 },
3522 ],
3523 });
3524
3525 cx.executor().run_until_parked();
3526 buffer.update(cx, |buffer, _| {
3527 assert_eq!(
3528 buffer
3529 .snapshot()
3530 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3531 .collect::<Vec<_>>(),
3532 &[
3533 DiagnosticEntry {
3534 range: Point::new(2, 21)..Point::new(2, 22),
3535 diagnostic: Diagnostic {
3536 source: Some("disk".into()),
3537 severity: DiagnosticSeverity::WARNING,
3538 message: "undefined variable 'A'".to_string(),
3539 is_disk_based: true,
3540 group_id: 6,
3541 is_primary: true,
3542 source_kind: DiagnosticSourceKind::Pushed,
3543 ..Diagnostic::default()
3544 }
3545 },
3546 DiagnosticEntry {
3547 range: Point::new(3, 9)..Point::new(3, 14),
3548 diagnostic: Diagnostic {
3549 source: Some("disk".into()),
3550 severity: DiagnosticSeverity::ERROR,
3551 message: "undefined variable 'BB'".to_string(),
3552 is_disk_based: true,
3553 group_id: 5,
3554 is_primary: true,
3555 source_kind: DiagnosticSourceKind::Pushed,
3556 ..Diagnostic::default()
3557 },
3558 }
3559 ]
3560 );
3561 });
3562}
3563
3564#[gpui::test]
3565async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3566 init_test(cx);
3567
3568 let text = concat!(
3569 "let one = ;\n", //
3570 "let two = \n",
3571 "let three = 3;\n",
3572 );
3573
3574 let fs = FakeFs::new(cx.executor());
3575 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3576
3577 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3578 let buffer = project
3579 .update(cx, |project, cx| {
3580 project.open_local_buffer(path!("/dir/a.rs"), cx)
3581 })
3582 .await
3583 .unwrap();
3584
3585 project.update(cx, |project, cx| {
3586 project.lsp_store().update(cx, |lsp_store, cx| {
3587 lsp_store
3588 .update_diagnostic_entries(
3589 LanguageServerId(0),
3590 PathBuf::from(path!("/dir/a.rs")),
3591 None,
3592 None,
3593 vec![
3594 DiagnosticEntry {
3595 range: Unclipped(PointUtf16::new(0, 10))
3596 ..Unclipped(PointUtf16::new(0, 10)),
3597 diagnostic: Diagnostic {
3598 severity: DiagnosticSeverity::ERROR,
3599 message: "syntax error 1".to_string(),
3600 source_kind: DiagnosticSourceKind::Pushed,
3601 ..Diagnostic::default()
3602 },
3603 },
3604 DiagnosticEntry {
3605 range: Unclipped(PointUtf16::new(1, 10))
3606 ..Unclipped(PointUtf16::new(1, 10)),
3607 diagnostic: Diagnostic {
3608 severity: DiagnosticSeverity::ERROR,
3609 message: "syntax error 2".to_string(),
3610 source_kind: DiagnosticSourceKind::Pushed,
3611 ..Diagnostic::default()
3612 },
3613 },
3614 ],
3615 cx,
3616 )
3617 .unwrap();
3618 })
3619 });
3620
3621 // An empty range is extended forward to include the following character.
3622 // At the end of a line, an empty range is extended backward to include
3623 // the preceding character.
3624 buffer.update(cx, |buffer, _| {
3625 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3626 assert_eq!(
3627 chunks
3628 .iter()
3629 .map(|(s, d)| (s.as_str(), *d))
3630 .collect::<Vec<_>>(),
3631 &[
3632 ("let one = ", None),
3633 (";", Some(DiagnosticSeverity::ERROR)),
3634 ("\nlet two =", None),
3635 (" ", Some(DiagnosticSeverity::ERROR)),
3636 ("\nlet three = 3;\n", None)
3637 ]
3638 );
3639 });
3640}
3641
3642#[gpui::test]
3643async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3644 init_test(cx);
3645
3646 let fs = FakeFs::new(cx.executor());
3647 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3648 .await;
3649
3650 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3651 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3652
3653 lsp_store.update(cx, |lsp_store, cx| {
3654 lsp_store
3655 .update_diagnostic_entries(
3656 LanguageServerId(0),
3657 Path::new(path!("/dir/a.rs")).to_owned(),
3658 None,
3659 None,
3660 vec![DiagnosticEntry {
3661 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3662 diagnostic: Diagnostic {
3663 severity: DiagnosticSeverity::ERROR,
3664 is_primary: true,
3665 message: "syntax error a1".to_string(),
3666 source_kind: DiagnosticSourceKind::Pushed,
3667 ..Diagnostic::default()
3668 },
3669 }],
3670 cx,
3671 )
3672 .unwrap();
3673 lsp_store
3674 .update_diagnostic_entries(
3675 LanguageServerId(1),
3676 Path::new(path!("/dir/a.rs")).to_owned(),
3677 None,
3678 None,
3679 vec![DiagnosticEntry {
3680 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3681 diagnostic: Diagnostic {
3682 severity: DiagnosticSeverity::ERROR,
3683 is_primary: true,
3684 message: "syntax error b1".to_string(),
3685 source_kind: DiagnosticSourceKind::Pushed,
3686 ..Diagnostic::default()
3687 },
3688 }],
3689 cx,
3690 )
3691 .unwrap();
3692
3693 assert_eq!(
3694 lsp_store.diagnostic_summary(false, cx),
3695 DiagnosticSummary {
3696 error_count: 2,
3697 warning_count: 0,
3698 }
3699 );
3700 });
3701}
3702
3703#[gpui::test]
3704async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3705 cx: &mut gpui::TestAppContext,
3706) {
3707 init_test(cx);
3708
3709 let fs = FakeFs::new(cx.executor());
3710 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3711 .await;
3712
3713 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3714 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3715
3716 lsp_store.update(cx, |lsp_store, cx| {
3717 lsp_store
3718 .update_diagnostic_entries(
3719 LanguageServerId(0),
3720 Path::new(path!("/dir/a.rs")).to_owned(),
3721 None,
3722 None,
3723 vec![DiagnosticEntry {
3724 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3725 diagnostic: Diagnostic {
3726 severity: DiagnosticSeverity::ERROR,
3727 is_primary: true,
3728 message: "error in a".to_string(),
3729 source_kind: DiagnosticSourceKind::Pushed,
3730 ..Diagnostic::default()
3731 },
3732 }],
3733 cx,
3734 )
3735 .unwrap();
3736 lsp_store
3737 .update_diagnostic_entries(
3738 LanguageServerId(0),
3739 Path::new(path!("/dir/b.rs")).to_owned(),
3740 None,
3741 None,
3742 vec![DiagnosticEntry {
3743 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3744 diagnostic: Diagnostic {
3745 severity: DiagnosticSeverity::WARNING,
3746 is_primary: true,
3747 message: "warning in b".to_string(),
3748 source_kind: DiagnosticSourceKind::Pushed,
3749 ..Diagnostic::default()
3750 },
3751 }],
3752 cx,
3753 )
3754 .unwrap();
3755
3756 assert_eq!(
3757 lsp_store.diagnostic_summary(false, cx),
3758 DiagnosticSummary {
3759 error_count: 1,
3760 warning_count: 1,
3761 }
3762 );
3763 });
3764
3765 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3766 .await
3767 .unwrap();
3768 cx.executor().run_until_parked();
3769
3770 lsp_store.update(cx, |lsp_store, cx| {
3771 assert_eq!(
3772 lsp_store.diagnostic_summary(false, cx),
3773 DiagnosticSummary {
3774 error_count: 0,
3775 warning_count: 1,
3776 },
3777 );
3778 });
3779}
3780
// Restarting a language server must drop the diagnostics it had published, and
// the drop must be announced via a `DiagnosticsUpdated` event.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish a single error so the project-wide summary becomes non-empty.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe to project events *before* triggering the restart so the
    // resulting notifications cannot be missed.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![buffer.clone()],
            HashSet::default(),
            true,
            cx,
        );
    });
    cx.executor().run_until_parked();

    // Drain whatever events are already queued (`now_or_never` returns None
    // once the stream would block) and check that at least one of them was a
    // diagnostics update.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // After the restart the old server's diagnostics must be gone.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3859
// A buffer reload (triggered by an on-disk change) should cause a fresh
// document-diagnostics pull from servers that advertise pull support.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many textDocument/diagnostic requests the fake server receives.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise pull-diagnostics support so the client issues
            // textDocument/diagnostic requests at all.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Answer every pull with an empty full report, bumping the counter
            // so the test can observe that a pull happened.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the counter before the reload so we can assert on the delta.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3967
// LSP edits expressed against a stale document version must be transformed
// onto the buffer's current contents, preserving edits the user made after
// the server captured its snapshot.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be expressed against this soon-to-be-stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits (addressed to the old version). They must
    // be mapped through the user's intervening edits rather than applied at
    // their literal coordinates.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits keeps the user's comments intact while
    // landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4122
// A server may express a tiny logical change as a sprawling diff; the edit
// resolution should minimize it down to the actual differences.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into concrete points so they can
        // be compared against expectations.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff was minimized to just the two real changes: the use
        // statement rewrite and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimized edits yields the merged-imports result.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4233
4234#[gpui::test]
4235async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4236 cx: &mut gpui::TestAppContext,
4237) {
4238 init_test(cx);
4239
4240 let text = "Path()";
4241
4242 let fs = FakeFs::new(cx.executor());
4243 fs.insert_tree(
4244 path!("/dir"),
4245 json!({
4246 "a.rs": text
4247 }),
4248 )
4249 .await;
4250
4251 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4252 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4253 let buffer = project
4254 .update(cx, |project, cx| {
4255 project.open_local_buffer(path!("/dir/a.rs"), cx)
4256 })
4257 .await
4258 .unwrap();
4259
4260 // Simulate the language server sending us a pair of edits at the same location,
4261 // with an insertion following a replacement (which violates the LSP spec).
4262 let edits = lsp_store
4263 .update(cx, |lsp_store, cx| {
4264 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4265 &buffer,
4266 [
4267 lsp::TextEdit {
4268 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4269 new_text: "Path".into(),
4270 },
4271 lsp::TextEdit {
4272 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4273 new_text: "from path import Path\n\n\n".into(),
4274 },
4275 ],
4276 LanguageServerId(0),
4277 None,
4278 cx,
4279 )
4280 })
4281 .await
4282 .unwrap();
4283
4284 buffer.update(cx, |buffer, cx| {
4285 buffer.edit(edits, None, cx);
4286 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4287 });
4288}
4289
// Malformed edit lists (unsorted, inverted ranges, out-of-bounds positions)
// must be normalized rather than rejected or applied literally.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into concrete points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimized pair
        // of edits as in the well-formed adjacent-lines case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4396
4397fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4398 buffer: &Buffer,
4399 range: Range<T>,
4400) -> Vec<(String, Option<DiagnosticSeverity>)> {
4401 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4402 for chunk in buffer.snapshot().chunks(
4403 range,
4404 LanguageAwareStyling {
4405 tree_sitter: true,
4406 diagnostics: true,
4407 },
4408 ) {
4409 if chunks
4410 .last()
4411 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4412 {
4413 chunks.last_mut().unwrap().0.push_str(chunk.text);
4414 } else {
4415 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4416 }
4417 }
4418 chunks
4419}
4420
// Go-to-definition to a file outside the project should open it in a
// temporary, invisible worktree that is released when the definition is
// dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but outside
    // any project worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server validates the request coordinates and reports that the
    // symbol is defined in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Resolving the definition created an extra worktree for `a.rs`,
        // marked not-visible, alongside the original visible one for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4521
4522#[gpui::test]
4523async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4524 init_test(cx);
4525
4526 let fs = FakeFs::new(cx.executor());
4527 fs.insert_tree(
4528 path!("/dir"),
4529 json!({
4530 "a.ts": "",
4531 }),
4532 )
4533 .await;
4534
4535 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4536
4537 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4538 language_registry.add(typescript_lang());
4539 let mut fake_language_servers = language_registry.register_fake_lsp(
4540 "TypeScript",
4541 FakeLspAdapter {
4542 capabilities: lsp::ServerCapabilities {
4543 completion_provider: Some(lsp::CompletionOptions {
4544 trigger_characters: Some(vec![".".to_string()]),
4545 ..Default::default()
4546 }),
4547 ..Default::default()
4548 },
4549 ..Default::default()
4550 },
4551 );
4552
4553 let (buffer, _handle) = project
4554 .update(cx, |p, cx| {
4555 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4556 })
4557 .await
4558 .unwrap();
4559
4560 let fake_server = fake_language_servers.next().await.unwrap();
4561 cx.executor().run_until_parked();
4562
4563 // When text_edit exists, it takes precedence over insert_text and label
4564 let text = "let a = obj.fqn";
4565 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4566 let completions = project.update(cx, |project, cx| {
4567 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4568 });
4569
4570 fake_server
4571 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4572 Ok(Some(lsp::CompletionResponse::Array(vec![
4573 lsp::CompletionItem {
4574 label: "labelText".into(),
4575 insert_text: Some("insertText".into()),
4576 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4577 range: lsp::Range::new(
4578 lsp::Position::new(0, text.len() as u32 - 3),
4579 lsp::Position::new(0, text.len() as u32),
4580 ),
4581 new_text: "textEditText".into(),
4582 })),
4583 ..Default::default()
4584 },
4585 ])))
4586 })
4587 .next()
4588 .await;
4589
4590 let completions = completions
4591 .await
4592 .unwrap()
4593 .into_iter()
4594 .flat_map(|response| response.completions)
4595 .collect::<Vec<_>>();
4596 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4597
4598 assert_eq!(completions.len(), 1);
4599 assert_eq!(completions[0].new_text, "textEditText");
4600 assert_eq!(
4601 completions[0].replace_range.to_offset(&snapshot),
4602 text.len() - 3..text.len()
4603 );
4604}
4605
// Verifies completion resolution when the server supplies a list-level
// default `edit_range` (LSP `CompletionList.itemDefaults`): an item's own
// `text_edit_text` is preferred when present; otherwise the item's `label`
// becomes the replacement text. In both cases the default range is used.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server advertising `.`-triggered completions; requests are
    // serviced by the handler closures installed below.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request first; the handler installed below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is used, applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // `insert_text` should be ignored when a default
                        // edit range is present.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no per-item text, the label is the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4743
// Verifies completion resolution when the server supplies neither a
// `text_edit` nor a default edit range: the replacement text falls back to
// `insert_text`, then to `label`, and the replace range is computed from
// the word around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler installed below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` is used; the range covers the "fqn" word at the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just inside the closing quote, after "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With nothing else to go on, the label is used, replacing "cmp".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4850
4851#[gpui::test]
4852async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4853 init_test(cx);
4854
4855 let fs = FakeFs::new(cx.executor());
4856 fs.insert_tree(
4857 path!("/dir"),
4858 json!({
4859 "a.ts": "",
4860 }),
4861 )
4862 .await;
4863
4864 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4865
4866 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4867 language_registry.add(typescript_lang());
4868 let mut fake_language_servers = language_registry.register_fake_lsp(
4869 "TypeScript",
4870 FakeLspAdapter {
4871 capabilities: lsp::ServerCapabilities {
4872 completion_provider: Some(lsp::CompletionOptions {
4873 trigger_characters: Some(vec![":".to_string()]),
4874 ..Default::default()
4875 }),
4876 ..Default::default()
4877 },
4878 ..Default::default()
4879 },
4880 );
4881
4882 let (buffer, _handle) = project
4883 .update(cx, |p, cx| {
4884 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4885 })
4886 .await
4887 .unwrap();
4888
4889 let fake_server = fake_language_servers.next().await.unwrap();
4890 cx.executor().run_until_parked();
4891
4892 let text = "let a = b.fqn";
4893 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4894 let completions = project.update(cx, |project, cx| {
4895 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4896 });
4897
4898 fake_server
4899 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4900 Ok(Some(lsp::CompletionResponse::Array(vec![
4901 lsp::CompletionItem {
4902 label: "fullyQualifiedName?".into(),
4903 insert_text: Some("fully\rQualified\r\nName".into()),
4904 ..Default::default()
4905 },
4906 ])))
4907 })
4908 .next()
4909 .await;
4910 let completions = completions
4911 .await
4912 .unwrap()
4913 .into_iter()
4914 .flat_map(|response| response.completions)
4915 .collect::<Vec<_>>();
4916 assert_eq!(completions.len(), 1);
4917 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4918}
4919
// Exercises the full command-backed code-action round trip: the server
// returns an action with no edits, resolving it yields only a command,
// executing that command makes the server send `workspace/applyEdit` back
// to the editor, and those edits end up in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises code-action resolve support and a single
    // executable command, both used below.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-backed) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5063
// Renaming an entry into a not-yet-existing directory hierarchy must create
// the intermediate directories, and a follow-up rename into an existing
// directory must also succeed. File contents are preserved in both moves.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First move: into a three-level directory chain that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look the entry up again: the move assigned it a new location.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second move: up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
5171
5172#[gpui::test(iterations = 10)]
5173async fn test_save_file(cx: &mut gpui::TestAppContext) {
5174 init_test(cx);
5175
5176 let fs = FakeFs::new(cx.executor());
5177 fs.insert_tree(
5178 path!("/dir"),
5179 json!({
5180 "file1": "the old contents",
5181 }),
5182 )
5183 .await;
5184
5185 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5186 let buffer = project
5187 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5188 .await
5189 .unwrap();
5190 buffer.update(cx, |buffer, cx| {
5191 assert_eq!(buffer.text(), "the old contents");
5192 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5193 });
5194
5195 project
5196 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5197 .await
5198 .unwrap();
5199
5200 let new_text = fs
5201 .load(Path::new(path!("/dir/file1")))
5202 .await
5203 .unwrap()
5204 .replace("\r\n", "\n");
5205 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5206}
5207
// Regression test for #24349: an untitled buffer has no language servers,
// but saving it under a name matching a registered language must start the
// corresponding server and notify it that the file is open.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file it has no language, so
    // registering it with language servers starts nothing yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path inside the project's worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5287
5288#[gpui::test(iterations = 30)]
5289async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5290 init_test(cx);
5291
5292 let fs = FakeFs::new(cx.executor());
5293 fs.insert_tree(
5294 path!("/dir"),
5295 json!({
5296 "file1": "the original contents",
5297 }),
5298 )
5299 .await;
5300
5301 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5302 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5303 let buffer = project
5304 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5305 .await
5306 .unwrap();
5307
5308 // Change the buffer's file on disk, and then wait for the file change
5309 // to be detected by the worktree, so that the buffer starts reloading.
5310 fs.save(
5311 path!("/dir/file1").as_ref(),
5312 &"the first contents".into(),
5313 Default::default(),
5314 )
5315 .await
5316 .unwrap();
5317 worktree.next_event(cx).await;
5318
5319 // Change the buffer's file again. Depending on the random seed, the
5320 // previous file change may still be in progress.
5321 fs.save(
5322 path!("/dir/file1").as_ref(),
5323 &"the second contents".into(),
5324 Default::default(),
5325 )
5326 .await
5327 .unwrap();
5328 worktree.next_event(cx).await;
5329
5330 cx.executor().run_until_parked();
5331 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5332 buffer.read_with(cx, |buffer, _| {
5333 assert_eq!(buffer.text(), on_disk_text);
5334 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5335 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5336 });
5337}
5338
5339#[gpui::test(iterations = 30)]
5340async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5341 init_test(cx);
5342
5343 let fs = FakeFs::new(cx.executor());
5344 fs.insert_tree(
5345 path!("/dir"),
5346 json!({
5347 "file1": "the original contents",
5348 }),
5349 )
5350 .await;
5351
5352 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5353 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5354 let buffer = project
5355 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5356 .await
5357 .unwrap();
5358
5359 // Change the buffer's file on disk, and then wait for the file change
5360 // to be detected by the worktree, so that the buffer starts reloading.
5361 fs.save(
5362 path!("/dir/file1").as_ref(),
5363 &"the first contents".into(),
5364 Default::default(),
5365 )
5366 .await
5367 .unwrap();
5368 worktree.next_event(cx).await;
5369
5370 cx.executor()
5371 .spawn(cx.executor().simulate_random_delay())
5372 .await;
5373
5374 // Perform a noop edit, causing the buffer's version to increase.
5375 buffer.update(cx, |buffer, cx| {
5376 buffer.edit([(0..0, " ")], None, cx);
5377 buffer.undo(cx);
5378 });
5379
5380 cx.executor().run_until_parked();
5381 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5382 buffer.read_with(cx, |buffer, _| {
5383 let buffer_text = buffer.text();
5384 if buffer_text == on_disk_text {
5385 assert!(
5386 !buffer.is_dirty() && !buffer.has_conflict(),
5387 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5388 );
5389 }
5390 // If the file change occurred while the buffer was processing the first
5391 // change, the buffer will be in a conflicting state.
5392 else {
5393 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5394 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5395 }
5396 });
5397}
5398
5399#[gpui::test]
5400async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5401 init_test(cx);
5402
5403 let fs = FakeFs::new(cx.executor());
5404 fs.insert_tree(
5405 path!("/dir"),
5406 json!({
5407 "file1": "the old contents",
5408 }),
5409 )
5410 .await;
5411
5412 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5413 let buffer = project
5414 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5415 .await
5416 .unwrap();
5417 buffer.update(cx, |buffer, cx| {
5418 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5419 });
5420
5421 project
5422 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5423 .await
5424 .unwrap();
5425
5426 let new_text = fs
5427 .load(Path::new(path!("/dir/file1")))
5428 .await
5429 .unwrap()
5430 .replace("\r\n", "\n");
5431 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5432}
5433
5434#[gpui::test]
5435async fn test_save_as(cx: &mut gpui::TestAppContext) {
5436 init_test(cx);
5437
5438 let fs = FakeFs::new(cx.executor());
5439 fs.insert_tree("/dir", json!({})).await;
5440
5441 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5442
5443 let languages = project.update(cx, |project, _| project.languages().clone());
5444 languages.add(rust_lang());
5445
5446 let buffer = project.update(cx, |project, cx| {
5447 project.create_local_buffer("", None, false, cx)
5448 });
5449 buffer.update(cx, |buffer, cx| {
5450 buffer.edit([(0..0, "abc")], None, cx);
5451 assert!(buffer.is_dirty());
5452 assert!(!buffer.has_conflict());
5453 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5454 });
5455 project
5456 .update(cx, |project, cx| {
5457 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5458 let path = ProjectPath {
5459 worktree_id,
5460 path: rel_path("file1.rs").into(),
5461 };
5462 project.save_buffer_as(buffer.clone(), path, cx)
5463 })
5464 .await
5465 .unwrap();
5466 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5467
5468 cx.executor().run_until_parked();
5469 buffer.update(cx, |buffer, cx| {
5470 assert_eq!(
5471 buffer.file().unwrap().full_path(cx),
5472 Path::new("dir/file1.rs")
5473 );
5474 assert!(!buffer.is_dirty());
5475 assert!(!buffer.has_conflict());
5476 assert_eq!(buffer.language().unwrap().name(), "Rust");
5477 });
5478
5479 let opened_buffer = project
5480 .update(cx, |project, cx| {
5481 project.open_local_buffer("/dir/file1.rs", cx)
5482 })
5483 .await
5484 .unwrap();
5485 assert_eq!(opened_buffer, buffer);
5486}
5487
// "Save as" on a buffer that already has a file: the buffer re-associates
// with the new path, while the original file on disk keeps its old contents
// and can be reopened independently.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Change the trailing "a" to "b" so the two files' contents differ.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5554
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Renames and deletions on the real filesystem must preserve stable entry
    // ids and re-point open buffers, and a remote replica of the worktree must
    // converge to the same entries once the buffered update messages are
    // replayed onto it.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for a worktree-relative path, panicking if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update message the local worktree would send to remotes,
    // so they can be replayed onto the replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames and deletions once the fs
    // events have been processed.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames, including files moved by
    // renaming their parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the captured update messages on the replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5722
5723#[cfg(target_os = "linux")]
5724#[gpui::test(retries = 5)]
5725async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5726 init_test(cx);
5727 cx.executor().allow_parking();
5728
5729 let dir = TempTree::new(json!({}));
5730 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5731 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5732
5733 tree.flush_fs_events(cx).await;
5734
5735 let repro_dir = dir.path().join("repro");
5736 std::fs::create_dir(&repro_dir).unwrap();
5737 tree.flush_fs_events(cx).await;
5738
5739 cx.update(|cx| {
5740 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5741 });
5742
5743 std::fs::remove_dir_all(&repro_dir).unwrap();
5744 tree.flush_fs_events(cx).await;
5745
5746 cx.update(|cx| {
5747 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5748 });
5749
5750 std::fs::create_dir(&repro_dir).unwrap();
5751 tree.flush_fs_events(cx).await;
5752
5753 cx.update(|cx| {
5754 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5755 });
5756
5757 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5758 tree.flush_fs_events(cx).await;
5759
5760 cx.update(|cx| {
5761 assert!(
5762 tree.read(cx)
5763 .entry_for_path(rel_path("repro/repro-marker"))
5764 .is_some()
5765 );
5766 });
5767}
5768
5769#[gpui::test(iterations = 10)]
5770async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5771 init_test(cx);
5772
5773 let fs = FakeFs::new(cx.executor());
5774 fs.insert_tree(
5775 path!("/dir"),
5776 json!({
5777 "a": {
5778 "file1": "",
5779 }
5780 }),
5781 )
5782 .await;
5783
5784 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5785 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5786 let tree_id = tree.update(cx, |tree, _| tree.id());
5787
5788 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5789 project.update(cx, |project, cx| {
5790 let tree = project.worktrees(cx).next().unwrap();
5791 tree.read(cx)
5792 .entry_for_path(rel_path(path))
5793 .unwrap_or_else(|| panic!("no entry for path {}", path))
5794 .id
5795 })
5796 };
5797
5798 let dir_id = id_for_path("a", cx);
5799 let file_id = id_for_path("a/file1", cx);
5800 let buffer = project
5801 .update(cx, |p, cx| {
5802 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5803 })
5804 .await
5805 .unwrap();
5806 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5807
5808 project
5809 .update(cx, |project, cx| {
5810 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5811 })
5812 .unwrap()
5813 .await
5814 .into_included()
5815 .unwrap();
5816 cx.executor().run_until_parked();
5817
5818 assert_eq!(id_for_path("b", cx), dir_id);
5819 assert_eq!(id_for_path("b/file1", cx), file_id);
5820 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5821}
5822
5823#[gpui::test]
5824async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5825 init_test(cx);
5826
5827 let fs = FakeFs::new(cx.executor());
5828 fs.insert_tree(
5829 "/dir",
5830 json!({
5831 "a.txt": "a-contents",
5832 "b.txt": "b-contents",
5833 }),
5834 )
5835 .await;
5836
5837 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5838
5839 // Spawn multiple tasks to open paths, repeating some paths.
5840 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5841 (
5842 p.open_local_buffer("/dir/a.txt", cx),
5843 p.open_local_buffer("/dir/b.txt", cx),
5844 p.open_local_buffer("/dir/a.txt", cx),
5845 )
5846 });
5847
5848 let buffer_a_1 = buffer_a_1.await.unwrap();
5849 let buffer_a_2 = buffer_a_2.await.unwrap();
5850 let buffer_b = buffer_b.await.unwrap();
5851 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5852 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5853
5854 // There is only one buffer per path.
5855 let buffer_a_id = buffer_a_1.entity_id();
5856 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5857
5858 // Open the same path again while it is still open.
5859 drop(buffer_a_1);
5860 let buffer_a_3 = project
5861 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5862 .await
5863 .unwrap();
5864
5865 // There's still only one buffer per path.
5866 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5867}
5868
5869#[gpui::test]
5870async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5871 init_test(cx);
5872
5873 let fs = FakeFs::new(cx.executor());
5874 fs.insert_tree(
5875 path!("/dir"),
5876 json!({
5877 "file1": "abc",
5878 "file2": "def",
5879 "file3": "ghi",
5880 }),
5881 )
5882 .await;
5883
5884 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5885
5886 let buffer1 = project
5887 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5888 .await
5889 .unwrap();
5890 let events = Arc::new(Mutex::new(Vec::new()));
5891
5892 // initially, the buffer isn't dirty.
5893 buffer1.update(cx, |buffer, cx| {
5894 cx.subscribe(&buffer1, {
5895 let events = events.clone();
5896 move |_, _, event, _| match event {
5897 BufferEvent::Operation { .. } => {}
5898 _ => events.lock().push(event.clone()),
5899 }
5900 })
5901 .detach();
5902
5903 assert!(!buffer.is_dirty());
5904 assert!(events.lock().is_empty());
5905
5906 buffer.edit([(1..2, "")], None, cx);
5907 });
5908
5909 // after the first edit, the buffer is dirty, and emits a dirtied event.
5910 buffer1.update(cx, |buffer, cx| {
5911 assert!(buffer.text() == "ac");
5912 assert!(buffer.is_dirty());
5913 assert_eq!(
5914 *events.lock(),
5915 &[
5916 language::BufferEvent::Edited { is_local: true },
5917 language::BufferEvent::DirtyChanged
5918 ]
5919 );
5920 events.lock().clear();
5921 buffer.did_save(
5922 buffer.version(),
5923 buffer.file().unwrap().disk_state().mtime(),
5924 cx,
5925 );
5926 });
5927
5928 // after saving, the buffer is not dirty, and emits a saved event.
5929 buffer1.update(cx, |buffer, cx| {
5930 assert!(!buffer.is_dirty());
5931 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5932 events.lock().clear();
5933
5934 buffer.edit([(1..1, "B")], None, cx);
5935 buffer.edit([(2..2, "D")], None, cx);
5936 });
5937
5938 // after editing again, the buffer is dirty, and emits another dirty event.
5939 buffer1.update(cx, |buffer, cx| {
5940 assert!(buffer.text() == "aBDc");
5941 assert!(buffer.is_dirty());
5942 assert_eq!(
5943 *events.lock(),
5944 &[
5945 language::BufferEvent::Edited { is_local: true },
5946 language::BufferEvent::DirtyChanged,
5947 language::BufferEvent::Edited { is_local: true },
5948 ],
5949 );
5950 events.lock().clear();
5951
5952 // After restoring the buffer to its previously-saved state,
5953 // the buffer is not considered dirty anymore.
5954 buffer.edit([(1..3, "")], None, cx);
5955 assert!(buffer.text() == "ac");
5956 assert!(!buffer.is_dirty());
5957 });
5958
5959 assert_eq!(
5960 *events.lock(),
5961 &[
5962 language::BufferEvent::Edited { is_local: true },
5963 language::BufferEvent::DirtyChanged
5964 ]
5965 );
5966
5967 // When a file is deleted, it is not considered dirty.
5968 let events = Arc::new(Mutex::new(Vec::new()));
5969 let buffer2 = project
5970 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5971 .await
5972 .unwrap();
5973 buffer2.update(cx, |_, cx| {
5974 cx.subscribe(&buffer2, {
5975 let events = events.clone();
5976 move |_, _, event, _| match event {
5977 BufferEvent::Operation { .. } => {}
5978 _ => events.lock().push(event.clone()),
5979 }
5980 })
5981 .detach();
5982 });
5983
5984 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5985 .await
5986 .unwrap();
5987 cx.executor().run_until_parked();
5988 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5989 assert_eq!(
5990 mem::take(&mut *events.lock()),
5991 &[language::BufferEvent::FileHandleChanged]
5992 );
5993
5994 // Buffer becomes dirty when edited.
5995 buffer2.update(cx, |buffer, cx| {
5996 buffer.edit([(2..3, "")], None, cx);
5997 assert_eq!(buffer.is_dirty(), true);
5998 });
5999 assert_eq!(
6000 mem::take(&mut *events.lock()),
6001 &[
6002 language::BufferEvent::Edited { is_local: true },
6003 language::BufferEvent::DirtyChanged
6004 ]
6005 );
6006
6007 // Buffer becomes clean again when all of its content is removed, because
6008 // the file was deleted.
6009 buffer2.update(cx, |buffer, cx| {
6010 buffer.edit([(0..2, "")], None, cx);
6011 assert_eq!(buffer.is_empty(), true);
6012 assert_eq!(buffer.is_dirty(), false);
6013 });
6014 assert_eq!(
6015 *events.lock(),
6016 &[
6017 language::BufferEvent::Edited { is_local: true },
6018 language::BufferEvent::DirtyChanged
6019 ]
6020 );
6021
6022 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6023 let events = Arc::new(Mutex::new(Vec::new()));
6024 let buffer3 = project
6025 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6026 .await
6027 .unwrap();
6028 buffer3.update(cx, |_, cx| {
6029 cx.subscribe(&buffer3, {
6030 let events = events.clone();
6031 move |_, _, event, _| match event {
6032 BufferEvent::Operation { .. } => {}
6033 _ => events.lock().push(event.clone()),
6034 }
6035 })
6036 .detach();
6037 });
6038
6039 buffer3.update(cx, |buffer, cx| {
6040 buffer.edit([(0..0, "x")], None, cx);
6041 });
6042 events.lock().clear();
6043 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6044 .await
6045 .unwrap();
6046 cx.executor().run_until_parked();
6047 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6048 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6049}
6050
6051#[gpui::test]
6052async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
6053 init_test(cx);
6054
6055 let fs = FakeFs::new(cx.executor());
6056 fs.insert_tree(
6057 path!("/dir"),
6058 json!({
6059 "file.txt": "version 1",
6060 }),
6061 )
6062 .await;
6063
6064 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6065 let buffer = project
6066 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6067 .await
6068 .unwrap();
6069
6070 buffer.read_with(cx, |buffer, _| {
6071 assert_eq!(buffer.text(), "version 1");
6072 assert!(!buffer.is_dirty());
6073 });
6074
6075 // User makes an edit, making the buffer dirty.
6076 buffer.update(cx, |buffer, cx| {
6077 buffer.edit([(0..0, "user edit: ")], None, cx);
6078 });
6079
6080 buffer.read_with(cx, |buffer, _| {
6081 assert!(buffer.is_dirty());
6082 assert_eq!(buffer.text(), "user edit: version 1");
6083 });
6084
6085 // External tool writes new content while buffer is dirty.
6086 // file_updated() updates the File but suppresses ReloadNeeded.
6087 fs.save(
6088 path!("/dir/file.txt").as_ref(),
6089 &"version 2 from external tool".into(),
6090 Default::default(),
6091 )
6092 .await
6093 .unwrap();
6094 cx.executor().run_until_parked();
6095
6096 buffer.read_with(cx, |buffer, _| {
6097 assert!(buffer.has_conflict());
6098 assert_eq!(buffer.text(), "user edit: version 1");
6099 });
6100
6101 // User undoes their edit. Buffer becomes clean, but disk has different
6102 // content. did_edit() detects the dirty->clean transition and checks if
6103 // disk changed while dirty. Since mtime differs from saved_mtime, it
6104 // emits ReloadNeeded.
6105 buffer.update(cx, |buffer, cx| {
6106 buffer.undo(cx);
6107 });
6108 cx.executor().run_until_parked();
6109
6110 buffer.read_with(cx, |buffer, _| {
6111 assert_eq!(
6112 buffer.text(),
6113 "version 2 from external tool",
6114 "buffer should reload from disk after undo makes it clean"
6115 );
6116 assert!(!buffer.is_dirty());
6117 });
6118}
6119
6120#[gpui::test]
6121async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6122 init_test(cx);
6123
6124 let (initial_contents, initial_offsets) =
6125 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
6126 let fs = FakeFs::new(cx.executor());
6127 fs.insert_tree(
6128 path!("/dir"),
6129 json!({
6130 "the-file": initial_contents,
6131 }),
6132 )
6133 .await;
6134 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6135 let buffer = project
6136 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
6137 .await
6138 .unwrap();
6139
6140 let anchors = initial_offsets
6141 .iter()
6142 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
6143 .collect::<Vec<_>>();
6144
6145 // Change the file on disk, adding two new lines of text, and removing
6146 // one line.
6147 buffer.update(cx, |buffer, _| {
6148 assert!(!buffer.is_dirty());
6149 assert!(!buffer.has_conflict());
6150 });
6151
6152 let (new_contents, new_offsets) =
6153 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
6154 fs.save(
6155 path!("/dir/the-file").as_ref(),
6156 &new_contents.as_str().into(),
6157 LineEnding::Unix,
6158 )
6159 .await
6160 .unwrap();
6161
6162 // Because the buffer was not modified, it is reloaded from disk. Its
6163 // contents are edited according to the diff between the old and new
6164 // file contents.
6165 cx.executor().run_until_parked();
6166 buffer.update(cx, |buffer, _| {
6167 assert_eq!(buffer.text(), new_contents);
6168 assert!(!buffer.is_dirty());
6169 assert!(!buffer.has_conflict());
6170
6171 let anchor_offsets = anchors
6172 .iter()
6173 .map(|anchor| anchor.to_offset(&*buffer))
6174 .collect::<Vec<_>>();
6175 assert_eq!(anchor_offsets, new_offsets);
6176 });
6177
6178 // Modify the buffer
6179 buffer.update(cx, |buffer, cx| {
6180 buffer.edit([(0..0, " ")], None, cx);
6181 assert!(buffer.is_dirty());
6182 assert!(!buffer.has_conflict());
6183 });
6184
6185 // Change the file on disk again, adding blank lines to the beginning.
6186 fs.save(
6187 path!("/dir/the-file").as_ref(),
6188 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
6189 LineEnding::Unix,
6190 )
6191 .await
6192 .unwrap();
6193
6194 // Because the buffer is modified, it doesn't reload from disk, but is
6195 // marked as having a conflict.
6196 cx.executor().run_until_parked();
6197 buffer.update(cx, |buffer, _| {
6198 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
6199 assert!(buffer.has_conflict());
6200 });
6201}
6202
6203#[gpui::test]
6204async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6205 init_test(cx);
6206
6207 let fs = FakeFs::new(cx.executor());
6208 fs.insert_tree(
6209 path!("/dir"),
6210 json!({
6211 "file1": "a\nb\nc\n",
6212 "file2": "one\r\ntwo\r\nthree\r\n",
6213 }),
6214 )
6215 .await;
6216
6217 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6218 let buffer1 = project
6219 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6220 .await
6221 .unwrap();
6222 let buffer2 = project
6223 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6224 .await
6225 .unwrap();
6226
6227 buffer1.update(cx, |buffer, _| {
6228 assert_eq!(buffer.text(), "a\nb\nc\n");
6229 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6230 });
6231 buffer2.update(cx, |buffer, _| {
6232 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6233 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6234 });
6235
6236 // Change a file's line endings on disk from unix to windows. The buffer's
6237 // state updates correctly.
6238 fs.save(
6239 path!("/dir/file1").as_ref(),
6240 &"aaa\nb\nc\n".into(),
6241 LineEnding::Windows,
6242 )
6243 .await
6244 .unwrap();
6245 cx.executor().run_until_parked();
6246 buffer1.update(cx, |buffer, _| {
6247 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6248 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6249 });
6250
6251 // Save a file with windows line endings. The file is written correctly.
6252 buffer2.update(cx, |buffer, cx| {
6253 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6254 });
6255 project
6256 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6257 .await
6258 .unwrap();
6259 assert_eq!(
6260 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6261 "one\r\ntwo\r\nthree\r\nfour\r\n",
6262 );
6263}
6264
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics linked through `related_information` must be grouped: all
    // members of a group share a group id, with the original diagnostic
    // marked primary and its related hints marked non-primary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a push-diagnostics message containing two groups, linked via
    // `related_information`:
    //   - "error 1" (warning) with one related hint, and
    //   - "error 2" (error) with two related hints.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the push diagnostics into the LSP store as server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in position order: hints carry the group id of the
    // primary diagnostic they relate to.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" group: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" group: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6524
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must first ask a language server that
    // registered matching file-operation filters via
    // `workspace/willRenameFiles`, apply the workspace edit the server
    // returns, and then notify it with `workspace/didRenameFiles`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: all `.rs` files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both willRename and didRename capabilities.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs; this should trigger the
    // willRename request handled below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will answer willRenameFiles with; the
    // project is expected to resolve (and apply) exactly this edit.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server actually handed back, for the final check.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Server side: verify the announced old/new paths and return the edit.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new paths.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6661
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` resolves the renameable
    // range around the cursor, then `perform_rename` applies a multi-file
    // WorkspaceEdit returned by the fake server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Fake Rust server that advertises rename support including
    // `prepareProvider`, so Zed issues PrepareRename before Rename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Phase 1: prepare_rename at offset 7 (inside "ONE") should yield the
    // symbol's range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename; the server responds with edits spanning
    // both one.rs (the definition) and two.rs (the references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction maps each edited buffer to its transaction;
    // both files must now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6802
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search must cover both on-disk files and open buffers
    // with unsaved edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" over on-disk contents only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer without saving: the next search must reflect the
    // in-memory contents rather than the stale on-disk text of four.rs.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6879
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Search with `files_to_include` filters: only paths matched by the
    // inclusion globs may produce results; non-matching globs are ignored.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single matching glob restricts results to that extension.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching glob alongside a matching one must not suppress results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
7003
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Search with `files_to_exclude` filters: paths matched by any exclusion
    // glob are dropped from the results; non-matching globs have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing -> all files are returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding one extension leaves only the other.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching exclusion glob alongside a matching one changes nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension yields nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
7127
7128#[gpui::test]
7129async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7130 init_test(cx);
7131
7132 let search_query = "file";
7133
7134 let fs = FakeFs::new(cx.executor());
7135 fs.insert_tree(
7136 path!("/dir"),
7137 json!({
7138 "one.rs": r#"// Rust file one"#,
7139 "one.ts": r#"// TypeScript file one"#,
7140 "two.rs": r#"// Rust file two"#,
7141 "two.ts": r#"// TypeScript file two"#,
7142 }),
7143 )
7144 .await;
7145
7146 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7147 let path_style = PathStyle::local();
7148 let _buffer = project.update(cx, |project, cx| {
7149 project.create_local_buffer("file", None, false, cx)
7150 });
7151
7152 assert_eq!(
7153 search(
7154 &project,
7155 SearchQuery::text(
7156 search_query,
7157 false,
7158 true,
7159 false,
7160 Default::default(),
7161 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7162 false,
7163 None,
7164 )
7165 .unwrap(),
7166 cx
7167 )
7168 .await
7169 .unwrap(),
7170 HashMap::from_iter([
7171 (path!("dir/one.rs").to_string(), vec![8..12]),
7172 (path!("dir/one.ts").to_string(), vec![14..18]),
7173 (path!("dir/two.rs").to_string(), vec![8..12]),
7174 (path!("dir/two.ts").to_string(), vec![14..18]),
7175 ]),
7176 "If no exclusions match, all files should be returned"
7177 );
7178
7179 assert_eq!(
7180 search(
7181 &project,
7182 SearchQuery::text(
7183 search_query,
7184 false,
7185 true,
7186 false,
7187 Default::default(),
7188 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7189 false,
7190 None,
7191 )
7192 .unwrap(),
7193 cx
7194 )
7195 .await
7196 .unwrap(),
7197 HashMap::from_iter([
7198 (path!("dir/one.ts").to_string(), vec![14..18]),
7199 (path!("dir/two.ts").to_string(), vec![14..18]),
7200 ]),
7201 "Rust exclusion search should give only TypeScript files"
7202 );
7203
7204 assert_eq!(
7205 search(
7206 &project,
7207 SearchQuery::text(
7208 search_query,
7209 false,
7210 true,
7211 false,
7212 Default::default(),
7213 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7214 false,
7215 None,
7216 )
7217 .unwrap(),
7218 cx
7219 )
7220 .await
7221 .unwrap(),
7222 HashMap::from_iter([
7223 (path!("dir/one.rs").to_string(), vec![8..12]),
7224 (path!("dir/two.rs").to_string(), vec![8..12]),
7225 ]),
7226 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7227 );
7228
7229 assert!(
7230 search(
7231 &project,
7232 SearchQuery::text(
7233 search_query,
7234 false,
7235 true,
7236 false,
7237 Default::default(),
7238 PathMatcher::new(
7239 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7240 PathStyle::local(),
7241 )
7242 .unwrap(),
7243 false,
7244 None,
7245 )
7246 .unwrap(),
7247 cx
7248 )
7249 .await
7250 .unwrap()
7251 .is_empty(),
7252 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7253 );
7254}
7255
7256#[gpui::test]
7257async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7258 init_test(cx);
7259
7260 let search_query = "file";
7261
7262 let fs = FakeFs::new(cx.executor());
7263 fs.insert_tree(
7264 path!("/dir"),
7265 json!({
7266 "one.rs": r#"// Rust file one"#,
7267 "one.ts": r#"// TypeScript file one"#,
7268 "two.rs": r#"// Rust file two"#,
7269 "two.ts": r#"// TypeScript file two"#,
7270 }),
7271 )
7272 .await;
7273 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7274 assert!(
7275 search(
7276 &project,
7277 SearchQuery::text(
7278 search_query,
7279 false,
7280 true,
7281 false,
7282 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7283 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7284 false,
7285 None,
7286 )
7287 .unwrap(),
7288 cx
7289 )
7290 .await
7291 .unwrap()
7292 .is_empty(),
7293 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7294 );
7295
7296 assert!(
7297 search(
7298 &project,
7299 SearchQuery::text(
7300 search_query,
7301 false,
7302 true,
7303 false,
7304 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7305 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7306 false,
7307 None,
7308 )
7309 .unwrap(),
7310 cx
7311 )
7312 .await
7313 .unwrap()
7314 .is_empty(),
7315 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7316 );
7317
7318 assert!(
7319 search(
7320 &project,
7321 SearchQuery::text(
7322 search_query,
7323 false,
7324 true,
7325 false,
7326 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7327 .unwrap(),
7328 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7329 .unwrap(),
7330 false,
7331 None,
7332 )
7333 .unwrap(),
7334 cx
7335 )
7336 .await
7337 .unwrap()
7338 .is_empty(),
7339 "Non-matching inclusions and exclusions should not change that."
7340 );
7341
7342 assert_eq!(
7343 search(
7344 &project,
7345 SearchQuery::text(
7346 search_query,
7347 false,
7348 true,
7349 false,
7350 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7351 .unwrap(),
7352 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7353 .unwrap(),
7354 false,
7355 None,
7356 )
7357 .unwrap(),
7358 cx
7359 )
7360 .await
7361 .unwrap(),
7362 HashMap::from_iter([
7363 (path!("dir/one.ts").to_string(), vec![14..18]),
7364 (path!("dir/two.ts").to_string(), vec![14..18]),
7365 ]),
7366 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7367 );
7368}
7369
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs prefixed with a worktree root name should scope results
    // to that worktree; extension-only globs span all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    // Project spanning two worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An extension glob with no worktree prefix matches in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7468
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored directories are skipped by default; the `include_ignored`
    // flag opts them back in, and inclusion/exclusion filters still apply
    // within ignored trees.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored dirs (target/, node_modules/) are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so the previous scan state doesn't influence this one.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // `include_ignored = true`: every file in ignored dirs is searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion filters still narrow results inside ignored trees.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7593
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Non-ASCII (Cyrillic) queries: case-sensitive searches stay plain-text;
    // case-insensitive ones are lowered to regex queries. Match offsets are
    // byte offsets, so each 6-char Cyrillic word spans 12 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-sensitive unicode text remains a Text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive unicode search is implemented via a Regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' must be treated literally, not as a regex wildcard.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7676
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Creating a file entry whose name ends in ".." ("b..") must be treated
    // as a literal filename, not as a parent-directory path component.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Worktree rooted at /one/two/three, deeper than the fs root above.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // "b.." lands inside the worktree root, alongside existing entries.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
7719
7720#[gpui::test]
7721async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7722 init_test(cx);
7723
7724 let fs = FakeFs::new(cx.executor());
7725 fs.insert_tree(
7726 path!("/dir"),
7727 json!({
7728 "a.tsx": "a",
7729 }),
7730 )
7731 .await;
7732
7733 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7734
7735 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7736 language_registry.add(tsx_lang());
7737 let language_server_names = [
7738 "TypeScriptServer",
7739 "TailwindServer",
7740 "ESLintServer",
7741 "NoHoverCapabilitiesServer",
7742 ];
7743 let mut language_servers = [
7744 language_registry.register_fake_lsp(
7745 "tsx",
7746 FakeLspAdapter {
7747 name: language_server_names[0],
7748 capabilities: lsp::ServerCapabilities {
7749 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7750 ..lsp::ServerCapabilities::default()
7751 },
7752 ..FakeLspAdapter::default()
7753 },
7754 ),
7755 language_registry.register_fake_lsp(
7756 "tsx",
7757 FakeLspAdapter {
7758 name: language_server_names[1],
7759 capabilities: lsp::ServerCapabilities {
7760 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7761 ..lsp::ServerCapabilities::default()
7762 },
7763 ..FakeLspAdapter::default()
7764 },
7765 ),
7766 language_registry.register_fake_lsp(
7767 "tsx",
7768 FakeLspAdapter {
7769 name: language_server_names[2],
7770 capabilities: lsp::ServerCapabilities {
7771 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7772 ..lsp::ServerCapabilities::default()
7773 },
7774 ..FakeLspAdapter::default()
7775 },
7776 ),
7777 language_registry.register_fake_lsp(
7778 "tsx",
7779 FakeLspAdapter {
7780 name: language_server_names[3],
7781 capabilities: lsp::ServerCapabilities {
7782 hover_provider: None,
7783 ..lsp::ServerCapabilities::default()
7784 },
7785 ..FakeLspAdapter::default()
7786 },
7787 ),
7788 ];
7789
7790 let (buffer, _handle) = project
7791 .update(cx, |p, cx| {
7792 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7793 })
7794 .await
7795 .unwrap();
7796 cx.executor().run_until_parked();
7797
7798 let mut servers_with_hover_requests = HashMap::default();
7799 for i in 0..language_server_names.len() {
7800 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7801 panic!(
7802 "Failed to get language server #{i} with name {}",
7803 &language_server_names[i]
7804 )
7805 });
7806 let new_server_name = new_server.server.name();
7807 assert!(
7808 !servers_with_hover_requests.contains_key(&new_server_name),
7809 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7810 );
7811 match new_server_name.as_ref() {
7812 "TailwindServer" | "TypeScriptServer" => {
7813 servers_with_hover_requests.insert(
7814 new_server_name.clone(),
7815 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7816 move |_, _| {
7817 let name = new_server_name.clone();
7818 async move {
7819 Ok(Some(lsp::Hover {
7820 contents: lsp::HoverContents::Scalar(
7821 lsp::MarkedString::String(format!("{name} hover")),
7822 ),
7823 range: None,
7824 }))
7825 }
7826 },
7827 ),
7828 );
7829 }
7830 "ESLintServer" => {
7831 servers_with_hover_requests.insert(
7832 new_server_name,
7833 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7834 |_, _| async move { Ok(None) },
7835 ),
7836 );
7837 }
7838 "NoHoverCapabilitiesServer" => {
7839 let _never_handled = new_server
7840 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7841 panic!(
7842 "Should not call for hovers server with no corresponding capabilities"
7843 )
7844 });
7845 }
7846 unexpected => panic!("Unexpected server name: {unexpected}"),
7847 }
7848 }
7849
7850 let hover_task = project.update(cx, |project, cx| {
7851 project.hover(&buffer, Point::new(0, 0), cx)
7852 });
7853 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7854 |mut hover_request| async move {
7855 hover_request
7856 .next()
7857 .await
7858 .expect("All hover requests should have been triggered")
7859 },
7860 ))
7861 .await;
7862 assert_eq!(
7863 vec!["TailwindServer hover", "TypeScriptServer hover"],
7864 hover_task
7865 .await
7866 .into_iter()
7867 .flatten()
7868 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7869 .sorted()
7870 .collect::<Vec<_>>(),
7871 "Should receive hover responses from all related servers with hover capabilities"
7872 );
7873}
7874
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Hover responses whose parts are empty or whitespace-only must be
    // filtered out, yielding no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Every hover part is blank: empty string, spaces, or newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7948
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with an explicit `kinds` filter: only actions
    // of the requested kind(s) are returned, even if the server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for SOURCE_ORGANIZE_IMPORTS.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8027
8028#[gpui::test]
8029async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8030 cx: &mut gpui::TestAppContext,
8031) {
8032 init_test(cx);
8033
8034 let fs = FakeFs::new(cx.executor());
8035 fs.insert_tree(
8036 path!("/dir"),
8037 json!({
8038 "a.ts": "a",
8039 }),
8040 )
8041 .await;
8042
8043 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8044
8045 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8046 language_registry.add(typescript_lang());
8047 let mut fake_language_servers = language_registry.register_fake_lsp(
8048 "TypeScript",
8049 FakeLspAdapter {
8050 capabilities: lsp::ServerCapabilities {
8051 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8052 lsp::CodeActionOptions {
8053 code_action_kinds: Some(vec![
8054 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8055 "source.doc".into(),
8056 ]),
8057 ..lsp::CodeActionOptions::default()
8058 },
8059 )),
8060 ..lsp::ServerCapabilities::default()
8061 },
8062 ..FakeLspAdapter::default()
8063 },
8064 );
8065
8066 let (buffer, _handle) = project
8067 .update(cx, |p, cx| {
8068 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8069 })
8070 .await
8071 .unwrap();
8072 cx.executor().run_until_parked();
8073
8074 let fake_server = fake_language_servers
8075 .next()
8076 .await
8077 .expect("failed to get the language server");
8078
8079 let mut request_handled = fake_server.set_request_handler::<
8080 lsp::request::CodeActionRequest,
8081 _,
8082 _,
8083 >(move |params, _| async move {
8084 assert_eq!(
8085 params.context.only, None,
8086 "Code action requests without explicit kind filters should not send `context.only`"
8087 );
8088 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8089 lsp::CodeAction {
8090 title: "Add test".to_string(),
8091 kind: Some("source.addTest".into()),
8092 ..lsp::CodeAction::default()
8093 },
8094 )]))
8095 });
8096
8097 let code_actions_task = project.update(cx, |project, cx| {
8098 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8099 });
8100
8101 let () = request_handled
8102 .next()
8103 .await
8104 .expect("The code action request should have been triggered");
8105
8106 let code_actions = code_actions_task.await.unwrap().unwrap();
8107 assert_eq!(code_actions.len(), 1);
8108 assert_eq!(
8109 code_actions[0].lsp_action.action_kind(),
8110 Some("source.addTest".into())
8111 );
8112}
8113
8114#[gpui::test]
8115async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8116 init_test(cx);
8117
8118 let fs = FakeFs::new(cx.executor());
8119 fs.insert_tree(
8120 path!("/dir"),
8121 json!({
8122 "a.tsx": "a",
8123 }),
8124 )
8125 .await;
8126
8127 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8128
8129 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8130 language_registry.add(tsx_lang());
8131 let language_server_names = [
8132 "TypeScriptServer",
8133 "TailwindServer",
8134 "ESLintServer",
8135 "NoActionsCapabilitiesServer",
8136 ];
8137
8138 let mut language_server_rxs = [
8139 language_registry.register_fake_lsp(
8140 "tsx",
8141 FakeLspAdapter {
8142 name: language_server_names[0],
8143 capabilities: lsp::ServerCapabilities {
8144 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8145 ..lsp::ServerCapabilities::default()
8146 },
8147 ..FakeLspAdapter::default()
8148 },
8149 ),
8150 language_registry.register_fake_lsp(
8151 "tsx",
8152 FakeLspAdapter {
8153 name: language_server_names[1],
8154 capabilities: lsp::ServerCapabilities {
8155 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8156 ..lsp::ServerCapabilities::default()
8157 },
8158 ..FakeLspAdapter::default()
8159 },
8160 ),
8161 language_registry.register_fake_lsp(
8162 "tsx",
8163 FakeLspAdapter {
8164 name: language_server_names[2],
8165 capabilities: lsp::ServerCapabilities {
8166 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8167 ..lsp::ServerCapabilities::default()
8168 },
8169 ..FakeLspAdapter::default()
8170 },
8171 ),
8172 language_registry.register_fake_lsp(
8173 "tsx",
8174 FakeLspAdapter {
8175 name: language_server_names[3],
8176 capabilities: lsp::ServerCapabilities {
8177 code_action_provider: None,
8178 ..lsp::ServerCapabilities::default()
8179 },
8180 ..FakeLspAdapter::default()
8181 },
8182 ),
8183 ];
8184
8185 let (buffer, _handle) = project
8186 .update(cx, |p, cx| {
8187 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8188 })
8189 .await
8190 .unwrap();
8191 cx.executor().run_until_parked();
8192
8193 let mut servers_with_actions_requests = HashMap::default();
8194 for i in 0..language_server_names.len() {
8195 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8196 panic!(
8197 "Failed to get language server #{i} with name {}",
8198 &language_server_names[i]
8199 )
8200 });
8201 let new_server_name = new_server.server.name();
8202
8203 assert!(
8204 !servers_with_actions_requests.contains_key(&new_server_name),
8205 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8206 );
8207 match new_server_name.0.as_ref() {
8208 "TailwindServer" | "TypeScriptServer" => {
8209 servers_with_actions_requests.insert(
8210 new_server_name.clone(),
8211 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8212 move |_, _| {
8213 let name = new_server_name.clone();
8214 async move {
8215 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8216 lsp::CodeAction {
8217 title: format!("{name} code action"),
8218 ..lsp::CodeAction::default()
8219 },
8220 )]))
8221 }
8222 },
8223 ),
8224 );
8225 }
8226 "ESLintServer" => {
8227 servers_with_actions_requests.insert(
8228 new_server_name,
8229 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8230 |_, _| async move { Ok(None) },
8231 ),
8232 );
8233 }
8234 "NoActionsCapabilitiesServer" => {
8235 let _never_handled = new_server
8236 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8237 panic!(
8238 "Should not call for code actions server with no corresponding capabilities"
8239 )
8240 });
8241 }
8242 unexpected => panic!("Unexpected server name: {unexpected}"),
8243 }
8244 }
8245
8246 let code_actions_task = project.update(cx, |project, cx| {
8247 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8248 });
8249
8250 // cx.run_until_parked();
8251 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8252 |mut code_actions_request| async move {
8253 code_actions_request
8254 .next()
8255 .await
8256 .expect("All code actions requests should have been triggered")
8257 },
8258 ))
8259 .await;
8260 assert_eq!(
8261 vec!["TailwindServer code action", "TypeScriptServer code action"],
8262 code_actions_task
8263 .await
8264 .unwrap()
8265 .unwrap()
8266 .into_iter()
8267 .map(|code_action| code_action.lsp_action.title().to_owned())
8268 .sorted()
8269 .collect::<Vec<_>>(),
8270 "Should receive code actions responses from all related servers with hover capabilities"
8271 );
8272}
8273
8274#[gpui::test]
8275async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8276 init_test(cx);
8277
8278 let fs = FakeFs::new(cx.executor());
8279 fs.insert_tree(
8280 "/dir",
8281 json!({
8282 "a.rs": "let a = 1;",
8283 "b.rs": "let b = 2;",
8284 "c.rs": "let c = 2;",
8285 }),
8286 )
8287 .await;
8288
8289 let project = Project::test(
8290 fs,
8291 [
8292 "/dir/a.rs".as_ref(),
8293 "/dir/b.rs".as_ref(),
8294 "/dir/c.rs".as_ref(),
8295 ],
8296 cx,
8297 )
8298 .await;
8299
8300 // check the initial state and get the worktrees
8301 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8302 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8303 assert_eq!(worktrees.len(), 3);
8304
8305 let worktree_a = worktrees[0].read(cx);
8306 let worktree_b = worktrees[1].read(cx);
8307 let worktree_c = worktrees[2].read(cx);
8308
8309 // check they start in the right order
8310 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8311 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8312 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8313
8314 (
8315 worktrees[0].clone(),
8316 worktrees[1].clone(),
8317 worktrees[2].clone(),
8318 )
8319 });
8320
8321 // move first worktree to after the second
8322 // [a, b, c] -> [b, a, c]
8323 project
8324 .update(cx, |project, cx| {
8325 let first = worktree_a.read(cx);
8326 let second = worktree_b.read(cx);
8327 project.move_worktree(first.id(), second.id(), cx)
8328 })
8329 .expect("moving first after second");
8330
8331 // check the state after moving
8332 project.update(cx, |project, cx| {
8333 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8334 assert_eq!(worktrees.len(), 3);
8335
8336 let first = worktrees[0].read(cx);
8337 let second = worktrees[1].read(cx);
8338 let third = worktrees[2].read(cx);
8339
8340 // check they are now in the right order
8341 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8342 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8343 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8344 });
8345
8346 // move the second worktree to before the first
8347 // [b, a, c] -> [a, b, c]
8348 project
8349 .update(cx, |project, cx| {
8350 let second = worktree_a.read(cx);
8351 let first = worktree_b.read(cx);
8352 project.move_worktree(first.id(), second.id(), cx)
8353 })
8354 .expect("moving second before first");
8355
8356 // check the state after moving
8357 project.update(cx, |project, cx| {
8358 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8359 assert_eq!(worktrees.len(), 3);
8360
8361 let first = worktrees[0].read(cx);
8362 let second = worktrees[1].read(cx);
8363 let third = worktrees[2].read(cx);
8364
8365 // check they are now in the right order
8366 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8367 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8368 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8369 });
8370
8371 // move the second worktree to after the third
8372 // [a, b, c] -> [a, c, b]
8373 project
8374 .update(cx, |project, cx| {
8375 let second = worktree_b.read(cx);
8376 let third = worktree_c.read(cx);
8377 project.move_worktree(second.id(), third.id(), cx)
8378 })
8379 .expect("moving second after third");
8380
8381 // check the state after moving
8382 project.update(cx, |project, cx| {
8383 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8384 assert_eq!(worktrees.len(), 3);
8385
8386 let first = worktrees[0].read(cx);
8387 let second = worktrees[1].read(cx);
8388 let third = worktrees[2].read(cx);
8389
8390 // check they are now in the right order
8391 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8392 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8393 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8394 });
8395
8396 // move the third worktree to before the second
8397 // [a, c, b] -> [a, b, c]
8398 project
8399 .update(cx, |project, cx| {
8400 let third = worktree_c.read(cx);
8401 let second = worktree_b.read(cx);
8402 project.move_worktree(third.id(), second.id(), cx)
8403 })
8404 .expect("moving third before second");
8405
8406 // check the state after moving
8407 project.update(cx, |project, cx| {
8408 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8409 assert_eq!(worktrees.len(), 3);
8410
8411 let first = worktrees[0].read(cx);
8412 let second = worktrees[1].read(cx);
8413 let third = worktrees[2].read(cx);
8414
8415 // check they are now in the right order
8416 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8417 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8418 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8419 });
8420
8421 // move the first worktree to after the third
8422 // [a, b, c] -> [b, c, a]
8423 project
8424 .update(cx, |project, cx| {
8425 let first = worktree_a.read(cx);
8426 let third = worktree_c.read(cx);
8427 project.move_worktree(first.id(), third.id(), cx)
8428 })
8429 .expect("moving first after third");
8430
8431 // check the state after moving
8432 project.update(cx, |project, cx| {
8433 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8434 assert_eq!(worktrees.len(), 3);
8435
8436 let first = worktrees[0].read(cx);
8437 let second = worktrees[1].read(cx);
8438 let third = worktrees[2].read(cx);
8439
8440 // check they are now in the right order
8441 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8442 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8443 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8444 });
8445
8446 // move the third worktree to before the first
8447 // [b, c, a] -> [a, b, c]
8448 project
8449 .update(cx, |project, cx| {
8450 let third = worktree_a.read(cx);
8451 let first = worktree_b.read(cx);
8452 project.move_worktree(third.id(), first.id(), cx)
8453 })
8454 .expect("moving third before first");
8455
8456 // check the state after moving
8457 project.update(cx, |project, cx| {
8458 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8459 assert_eq!(worktrees.len(), 3);
8460
8461 let first = worktrees[0].read(cx);
8462 let second = worktrees[1].read(cx);
8463 let third = worktrees[2].read(cx);
8464
8465 // check they are now in the right order
8466 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8467 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8468 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8469 });
8470}
8471
8472#[gpui::test]
8473async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8474 init_test(cx);
8475
8476 let staged_contents = r#"
8477 fn main() {
8478 println!("hello world");
8479 }
8480 "#
8481 .unindent();
8482 let file_contents = r#"
8483 // print goodbye
8484 fn main() {
8485 println!("goodbye world");
8486 }
8487 "#
8488 .unindent();
8489
8490 let fs = FakeFs::new(cx.background_executor.clone());
8491 fs.insert_tree(
8492 "/dir",
8493 json!({
8494 ".git": {},
8495 "src": {
8496 "main.rs": file_contents,
8497 }
8498 }),
8499 )
8500 .await;
8501
8502 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8503
8504 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8505
8506 let buffer = project
8507 .update(cx, |project, cx| {
8508 project.open_local_buffer("/dir/src/main.rs", cx)
8509 })
8510 .await
8511 .unwrap();
8512 let unstaged_diff = project
8513 .update(cx, |project, cx| {
8514 project.open_unstaged_diff(buffer.clone(), cx)
8515 })
8516 .await
8517 .unwrap();
8518
8519 cx.run_until_parked();
8520 unstaged_diff.update(cx, |unstaged_diff, cx| {
8521 let snapshot = buffer.read(cx).snapshot();
8522 assert_hunks(
8523 unstaged_diff.snapshot(cx).hunks(&snapshot),
8524 &snapshot,
8525 &unstaged_diff.base_text_string(cx).unwrap(),
8526 &[
8527 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8528 (
8529 2..3,
8530 " println!(\"hello world\");\n",
8531 " println!(\"goodbye world\");\n",
8532 DiffHunkStatus::modified_none(),
8533 ),
8534 ],
8535 );
8536 });
8537
8538 let staged_contents = r#"
8539 // print goodbye
8540 fn main() {
8541 }
8542 "#
8543 .unindent();
8544
8545 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8546
8547 cx.run_until_parked();
8548 unstaged_diff.update(cx, |unstaged_diff, cx| {
8549 let snapshot = buffer.read(cx).snapshot();
8550 assert_hunks(
8551 unstaged_diff.snapshot(cx).hunks_intersecting_range(
8552 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
8553 &snapshot,
8554 ),
8555 &snapshot,
8556 &unstaged_diff.base_text(cx).text(),
8557 &[(
8558 2..3,
8559 "",
8560 " println!(\"goodbye world\");\n",
8561 DiffHunkStatus::added_none(),
8562 )],
8563 );
8564 });
8565}
8566
// Verifies the buffer-vs-HEAD ("uncommitted") diff: hunk contents and their
// secondary (staged/unstaged) status across index updates, HEAD resets, and a
// file deleted from the working tree.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the "goodbye" modification is staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language for highlighting.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment is unstaged (has a secondary hunk); the modification
    // is already staged, so it carries no secondary status.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; still present in the index, so the
    // deletion is unstaged (secondary hunk present).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index too, the deletion reads as staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8758
// Exercises staging individual hunks of an uncommitted diff: the optimistic
// "pending" secondary status, the events emitted while the index write is in
// flight, rollback when the index write fails, and batched staging operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index initially; the working copy deletes "zero" and uppercases
    // "two" and "four", producing three distinct hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on their order and payloads.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        // Select the "TWO" hunk (buffer row 1) by anchor range.
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is marked pending until the index write lands.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        // Select the "FOUR" hunk (buffer row 3).
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistically pending again, even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change spanning the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9108
// Regression test (hence the pinned seeds): staging additional hunks while the
// FS event for an earlier index write is still buffered must not lose or
// reorder either staging operation.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index start out identical; the working copy deletes "zero"
    // and uppercases "two"/"four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes complete without their FS notifications
    // being delivered — this is the delay under test.
    fs.pause_events();

    // Stage the first hunk. It is immediately reported as pending removal of
    // its secondary (unstaged) hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9302
// Randomized stress test: repeatedly stage/unstage random hunks, with random
// yields between operations, then verify that once quiescent every hunk's
// secondary status matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; override via the `OPERATIONS` env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the buffer is modified, yielding six hunks
    // (rows 0, 5, 10, 15, 20, 25).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation records
    // the pending status it should eventually resolve to.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let in-flight index writes and events interleave randomly.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status must have resolved to the
    // corresponding terminal status.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9422
9423#[gpui::test]
9424async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9425 init_test(cx);
9426
9427 let committed_contents = r#"
9428 fn main() {
9429 println!("hello from HEAD");
9430 }
9431 "#
9432 .unindent();
9433 let file_contents = r#"
9434 fn main() {
9435 println!("hello from the working copy");
9436 }
9437 "#
9438 .unindent();
9439
9440 let fs = FakeFs::new(cx.background_executor.clone());
9441 fs.insert_tree(
9442 "/dir",
9443 json!({
9444 ".git": {},
9445 "src": {
9446 "main.rs": file_contents,
9447 }
9448 }),
9449 )
9450 .await;
9451
9452 fs.set_head_for_repo(
9453 Path::new("/dir/.git"),
9454 &[("src/main.rs", committed_contents.clone())],
9455 "deadbeef",
9456 );
9457 fs.set_index_for_repo(
9458 Path::new("/dir/.git"),
9459 &[("src/main.rs", committed_contents.clone())],
9460 );
9461
9462 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9463
9464 let buffer = project
9465 .update(cx, |project, cx| {
9466 project.open_local_buffer("/dir/src/main.rs", cx)
9467 })
9468 .await
9469 .unwrap();
9470 let uncommitted_diff = project
9471 .update(cx, |project, cx| {
9472 project.open_uncommitted_diff(buffer.clone(), cx)
9473 })
9474 .await
9475 .unwrap();
9476
9477 cx.run_until_parked();
9478 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9479 let snapshot = buffer.read(cx).snapshot();
9480 assert_hunks(
9481 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9482 &snapshot,
9483 &uncommitted_diff.base_text_string(cx).unwrap(),
9484 &[(
9485 1..2,
9486 " println!(\"hello from HEAD\");\n",
9487 " println!(\"hello from the working copy\");\n",
9488 DiffHunkStatus {
9489 kind: DiffHunkStatusKind::Modified,
9490 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9491 },
9492 )],
9493 );
9494 });
9495}
9496
// TODO: Should we test this on Windows also?
// Verifies that staging a hunk rewrites the index entry without clobbering the
// executable bit (mode 100755) recorded for the file at commit time.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS and real `git` subprocesses are used below.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` as an executable (0o755), then modify it on disk so there
    // is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk in the file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged entry changed mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry's recorded mode directly.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9581
9582#[gpui::test]
9583async fn test_repository_and_path_for_project_path(
9584 background_executor: BackgroundExecutor,
9585 cx: &mut gpui::TestAppContext,
9586) {
9587 init_test(cx);
9588 let fs = FakeFs::new(background_executor);
9589 fs.insert_tree(
9590 path!("/root"),
9591 json!({
9592 "c.txt": "",
9593 "dir1": {
9594 ".git": {},
9595 "deps": {
9596 "dep1": {
9597 ".git": {},
9598 "src": {
9599 "a.txt": ""
9600 }
9601 }
9602 },
9603 "src": {
9604 "b.txt": ""
9605 }
9606 },
9607 }),
9608 )
9609 .await;
9610
9611 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9612 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9613 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9614 project
9615 .update(cx, |project, cx| project.git_scans_complete(cx))
9616 .await;
9617 cx.run_until_parked();
9618
9619 project.read_with(cx, |project, cx| {
9620 let git_store = project.git_store().read(cx);
9621 let pairs = [
9622 ("c.txt", None),
9623 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9624 (
9625 "dir1/deps/dep1/src/a.txt",
9626 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9627 ),
9628 ];
9629 let expected = pairs
9630 .iter()
9631 .map(|(path, result)| {
9632 (
9633 path,
9634 result.map(|(repo, repo_path)| {
9635 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9636 }),
9637 )
9638 })
9639 .collect::<Vec<_>>();
9640 let actual = pairs
9641 .iter()
9642 .map(|(path, _)| {
9643 let project_path = (tree_id, rel_path(path)).into();
9644 let result = maybe!({
9645 let (repo, repo_path) =
9646 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9647 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9648 });
9649 (path, result)
9650 })
9651 .collect::<Vec<_>>();
9652 pretty_assertions::assert_eq!(expected, actual);
9653 });
9654
9655 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9656 .await
9657 .unwrap();
9658 cx.run_until_parked();
9659
9660 project.read_with(cx, |project, cx| {
9661 let git_store = project.git_store().read(cx);
9662 assert_eq!(
9663 git_store.repository_and_path_for_project_path(
9664 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9665 cx
9666 ),
9667 None
9668 );
9669 });
9670}
9671
9672#[gpui::test]
9673async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9674 init_test(cx);
9675 let fs = FakeFs::new(cx.background_executor.clone());
9676 let home = paths::home_dir();
9677 fs.insert_tree(
9678 home,
9679 json!({
9680 ".git": {},
9681 "project": {
9682 "a.txt": "A"
9683 },
9684 }),
9685 )
9686 .await;
9687
9688 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9689 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9690 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9691
9692 project
9693 .update(cx, |project, cx| project.git_scans_complete(cx))
9694 .await;
9695 tree.flush_fs_events(cx).await;
9696
9697 project.read_with(cx, |project, cx| {
9698 let containing = project
9699 .git_store()
9700 .read(cx)
9701 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9702 assert!(containing.is_none());
9703 });
9704
9705 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9706 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9707 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9708 project
9709 .update(cx, |project, cx| project.git_scans_complete(cx))
9710 .await;
9711 tree.flush_fs_events(cx).await;
9712
9713 project.read_with(cx, |project, cx| {
9714 let containing = project
9715 .git_store()
9716 .read(cx)
9717 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9718 assert_eq!(
9719 containing
9720 .unwrap()
9721 .0
9722 .read(cx)
9723 .work_directory_abs_path
9724 .as_ref(),
9725 home,
9726 );
9727 });
9728}
9729
// End-to-end status test against a real on-disk git repository: checks the
// cached status (including diff stats) after the initial scan, after editing a
// clean file, after committing staged changes, and after deleting files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS and real git are used below, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the worktree states annotated above: delete d.txt, modify a.txt.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a file that was committed unchanged; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit all tracked changes, then delete one tracked and one untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9885
9886#[gpui::test]
9887#[ignore]
9888async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
9889 init_test(cx);
9890 cx.executor().allow_parking();
9891
9892 let root = TempTree::new(json!({
9893 "project": {
9894 "sub": {},
9895 "a.txt": "",
9896 },
9897 }));
9898
9899 let work_dir = root.path().join("project");
9900 let repo = git_init(work_dir.as_path());
9901 // a.txt exists in HEAD and the working copy but is deleted in the index.
9902 git_add("a.txt", &repo);
9903 git_commit("Initial commit", &repo);
9904 git_remove_index("a.txt".as_ref(), &repo);
9905 // `sub` is a nested git repository.
9906 let _sub = git_init(&work_dir.join("sub"));
9907
9908 let project = Project::test(
9909 Arc::new(RealFs::new(None, cx.executor())),
9910 [root.path()],
9911 cx,
9912 )
9913 .await;
9914
9915 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9916 tree.flush_fs_events(cx).await;
9917 project
9918 .update(cx, |project, cx| project.git_scans_complete(cx))
9919 .await;
9920 cx.executor().run_until_parked();
9921
9922 let repository = project.read_with(cx, |project, cx| {
9923 project
9924 .repositories(cx)
9925 .values()
9926 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
9927 .unwrap()
9928 .clone()
9929 });
9930
9931 repository.read_with(cx, |repository, _cx| {
9932 let entries = repository.cached_status().collect::<Vec<_>>();
9933
9934 // `sub` doesn't appear in our computed statuses.
9935 // a.txt appears with a combined `DA` status.
9936 assert_eq!(
9937 entries,
9938 [StatusEntry {
9939 repo_path: repo_path("a.txt"),
9940 status: TrackedStatus {
9941 index_status: StatusCode::Deleted,
9942 worktree_status: StatusCode::Added
9943 }
9944 .into(),
9945 diff_stat: None,
9946 }]
9947 )
9948 });
9949}
9950
9951#[track_caller]
9952/// We merge lhs into rhs.
9953fn merge_pending_ops_snapshots(
9954 source: Vec<pending_op::PendingOps>,
9955 mut target: Vec<pending_op::PendingOps>,
9956) -> Vec<pending_op::PendingOps> {
9957 for s_ops in source {
9958 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9959 if ops.repo_path == s_ops.repo_path {
9960 Some(idx)
9961 } else {
9962 None
9963 }
9964 }) {
9965 let t_ops = &mut target[idx];
9966 for s_op in s_ops.ops {
9967 if let Some(op_idx) = t_ops
9968 .ops
9969 .iter()
9970 .zip(0..)
9971 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9972 {
9973 let t_op = &mut t_ops.ops[op_idx];
9974 match (s_op.job_status, t_op.job_status) {
9975 (pending_op::JobStatus::Running, _) => {}
9976 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9977 (s_st, t_st) if s_st == t_st => {}
9978 _ => unreachable!(),
9979 }
9980 } else {
9981 t_ops.ops.push(s_op);
9982 }
9983 }
9984 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9985 } else {
9986 target.push(s_ops);
9987 }
9988 }
9989 target
9990}
9991
// Verifies that each stage/unstage of a single entry produces exactly one
// pending op — visible as `Running` while the task is in flight and `Finished`
// once it resolves — and that `PendingOpsChanged` events accumulate the full
// op history for the path.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected op id; each stage/unstage below should allocate the next one.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op is `Running` while the
    // task is in flight and `Finished` after awaiting it.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate stage/unstage five times, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream must contain all five ops, in order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10156
// Verifies op coalescing when two staging requests for the same path overlap:
// the first (detached) op is superseded and reported as `Skipped`, while the
// second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First staging request: detach it so it is still in flight when the
    // second request for the same path arrives.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second request for the same path; await it (with a timeout as a safety
    // net against hangs).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded by op 2 and must be reported as Skipped.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file still ends up staged (added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10266
// Verifies pending-op bookkeeping for bulk operations: after staging a single
// entry, then `stage_all` and `unstage_all`, each path accumulates one Staged
// and one Unstaged op (ids appear to be allocated per path — note a.txt's
// stage_all does not add a third op since it was already staged), and both
// files end up untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10397
10398#[gpui::test]
10399async fn test_repository_subfolder_git_status(
10400 executor: gpui::BackgroundExecutor,
10401 cx: &mut gpui::TestAppContext,
10402) {
10403 init_test(cx);
10404
10405 let fs = FakeFs::new(executor);
10406 fs.insert_tree(
10407 path!("/root"),
10408 json!({
10409 "my-repo": {
10410 ".git": {},
10411 "a.txt": "a",
10412 "sub-folder-1": {
10413 "sub-folder-2": {
10414 "c.txt": "cc",
10415 "d": {
10416 "e.txt": "eee"
10417 }
10418 },
10419 }
10420 },
10421 }),
10422 )
10423 .await;
10424
10425 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
10426 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
10427
10428 fs.set_status_for_repo(
10429 path!("/root/my-repo/.git").as_ref(),
10430 &[(E_TXT, FileStatus::Untracked)],
10431 );
10432
10433 let project = Project::test(
10434 fs.clone(),
10435 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
10436 cx,
10437 )
10438 .await;
10439
10440 project
10441 .update(cx, |project, cx| project.git_scans_complete(cx))
10442 .await;
10443 cx.run_until_parked();
10444
10445 let repository = project.read_with(cx, |project, cx| {
10446 project.repositories(cx).values().next().unwrap().clone()
10447 });
10448
10449 // Ensure that the git status is loaded correctly
10450 repository.read_with(cx, |repository, _cx| {
10451 assert_eq!(
10452 repository.work_directory_abs_path,
10453 Path::new(path!("/root/my-repo")).into()
10454 );
10455
10456 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10457 assert_eq!(
10458 repository
10459 .status_for_path(&repo_path(E_TXT))
10460 .unwrap()
10461 .status,
10462 FileStatus::Untracked
10463 );
10464 });
10465
10466 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
10467 project
10468 .update(cx, |project, cx| project.git_scans_complete(cx))
10469 .await;
10470 cx.run_until_parked();
10471
10472 repository.read_with(cx, |repository, _cx| {
10473 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10474 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
10475 });
10476}
10477
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): `#[cfg(any())]` never matches, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    // Create a repo with a single committed file.
    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // On a side branch, change a.txt's contents and commit.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Back on main, make a conflicting change to the same file, then
    // cherry-pick the side-branch commit to produce a conflict.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git recorded an in-progress cherry-pick...
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    // ...and that the file is conflicted from git's point of view.
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10560
10561#[gpui::test]
10562async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
10563 init_test(cx);
10564 let fs = FakeFs::new(cx.background_executor.clone());
10565 fs.insert_tree(
10566 path!("/root"),
10567 json!({
10568 ".git": {},
10569 ".gitignore": "*.txt\n",
10570 "a.xml": "<a></a>",
10571 "b.txt": "Some text"
10572 }),
10573 )
10574 .await;
10575
10576 fs.set_head_and_index_for_repo(
10577 path!("/root/.git").as_ref(),
10578 &[
10579 (".gitignore", "*.txt\n".into()),
10580 ("a.xml", "<a></a>".into()),
10581 ],
10582 );
10583
10584 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10585
10586 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10587 tree.flush_fs_events(cx).await;
10588 project
10589 .update(cx, |project, cx| project.git_scans_complete(cx))
10590 .await;
10591 cx.executor().run_until_parked();
10592
10593 let repository = project.read_with(cx, |project, cx| {
10594 project.repositories(cx).values().next().unwrap().clone()
10595 });
10596
10597 // One file is unmodified, the other is ignored.
10598 cx.read(|cx| {
10599 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
10600 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
10601 });
10602
10603 // Change the gitignore, and stage the newly non-ignored file.
10604 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
10605 .await
10606 .unwrap();
10607 fs.set_index_for_repo(
10608 Path::new(path!("/root/.git")),
10609 &[
10610 (".gitignore", "*.txt\n".into()),
10611 ("a.xml", "<a></a>".into()),
10612 ("b.txt", "Some text".into()),
10613 ],
10614 );
10615
10616 cx.executor().run_until_parked();
10617 cx.read(|cx| {
10618 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
10619 assert_entry_git_state(
10620 tree.read(cx),
10621 repository.read(cx),
10622 "b.txt",
10623 Some(StatusCode::Added),
10624 false,
10625 );
10626 });
10627}
10628
10629// NOTE:
10630// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
10631// a directory which some program has already open.
// This is a limitation of Windows.
10633// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10634// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10635#[gpui::test]
10636#[cfg_attr(target_os = "windows", ignore)]
10637async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
10638 init_test(cx);
10639 cx.executor().allow_parking();
10640 let root = TempTree::new(json!({
10641 "projects": {
10642 "project1": {
10643 "a": "",
10644 "b": "",
10645 }
10646 },
10647
10648 }));
10649 let root_path = root.path();
10650
10651 let repo = git_init(&root_path.join("projects/project1"));
10652 git_add("a", &repo);
10653 git_commit("init", &repo);
10654 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
10655
10656 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10657
10658 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10659 tree.flush_fs_events(cx).await;
10660 project
10661 .update(cx, |project, cx| project.git_scans_complete(cx))
10662 .await;
10663 cx.executor().run_until_parked();
10664
10665 let repository = project.read_with(cx, |project, cx| {
10666 project.repositories(cx).values().next().unwrap().clone()
10667 });
10668
10669 repository.read_with(cx, |repository, _| {
10670 assert_eq!(
10671 repository.work_directory_abs_path.as_ref(),
10672 root_path.join("projects/project1").as_path()
10673 );
10674 assert_eq!(
10675 repository
10676 .status_for_path(&repo_path("a"))
10677 .map(|entry| entry.status),
10678 Some(StatusCode::Modified.worktree()),
10679 );
10680 assert_eq!(
10681 repository
10682 .status_for_path(&repo_path("b"))
10683 .map(|entry| entry.status),
10684 Some(FileStatus::Untracked),
10685 );
10686 });
10687
10688 std::fs::rename(
10689 root_path.join("projects/project1"),
10690 root_path.join("projects/project2"),
10691 )
10692 .unwrap();
10693 tree.flush_fs_events(cx).await;
10694
10695 repository.read_with(cx, |repository, _| {
10696 assert_eq!(
10697 repository.work_directory_abs_path.as_ref(),
10698 root_path.join("projects/project2").as_path()
10699 );
10700 assert_eq!(
10701 repository.status_for_path(&repo_path("a")).unwrap().status,
10702 StatusCode::Modified.worktree(),
10703 );
10704 assert_eq!(
10705 repository.status_for_path(&repo_path("b")).unwrap().status,
10706 FileStatus::Untracked,
10707 );
10708 });
10709}
10710
10711// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
10712// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
10714// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10715// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // a.txt, e.txt and .gitignore are committed; b.txt and f.txt stay untracked.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean, so they carry no status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    // Writes under `target/` match IGNORE_RULE and should never get a status.
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file and a directory, and add f.txt to the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a freshly created nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked file's status should follow
    // the file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10935
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository and real FS events, so blocking is allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository-update and worktree-entry events so we can assert
    // exactly which events each FS change produces.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test plumbing, not a real file.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Load a file inside the ignored `target` dir so its ancestors get scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate flycheck-style churn inside the ignored directory: create a
    // `deps` dir, drop a temp file into it, then remove the whole dir,
    // letting the scanner settle after each step.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
    No updates for more nested directories should happen as those are ignored",
    );
}
11094
11095// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11096// to different timings/ordering of events.
11097#[ignore]
11098#[gpui::test]
11099async fn test_odd_events_for_ignored_dirs(
11100 executor: BackgroundExecutor,
11101 cx: &mut gpui::TestAppContext,
11102) {
11103 init_test(cx);
11104 let fs = FakeFs::new(executor);
11105 fs.insert_tree(
11106 path!("/root"),
11107 json!({
11108 ".git": {},
11109 ".gitignore": "**/target/",
11110 "src": {
11111 "main.rs": "fn main() {}",
11112 },
11113 "target": {
11114 "debug": {
11115 "foo.txt": "foo",
11116 "deps": {}
11117 }
11118 }
11119 }),
11120 )
11121 .await;
11122 fs.set_head_and_index_for_repo(
11123 path!("/root/.git").as_ref(),
11124 &[
11125 (".gitignore", "**/target/".into()),
11126 ("src/main.rs", "fn main() {}".into()),
11127 ],
11128 );
11129
11130 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11131 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11132 let project_events = Arc::new(Mutex::new(Vec::new()));
11133 project.update(cx, |project, cx| {
11134 let repository_updates = repository_updates.clone();
11135 cx.subscribe(project.git_store(), move |_, _, e, _| {
11136 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11137 repository_updates.lock().push(e.clone());
11138 }
11139 })
11140 .detach();
11141 let project_events = project_events.clone();
11142 cx.subscribe_self(move |_, e, _| {
11143 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11144 project_events.lock().extend(
11145 updates
11146 .iter()
11147 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11148 .filter(|(path, _)| path != "fs-event-sentinel"),
11149 );
11150 }
11151 })
11152 .detach();
11153 });
11154
11155 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11156 tree.update(cx, |tree, cx| {
11157 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11158 })
11159 .await
11160 .unwrap();
11161 tree.flush_fs_events(cx).await;
11162 project
11163 .update(cx, |project, cx| project.git_scans_complete(cx))
11164 .await;
11165 cx.run_until_parked();
11166 tree.update(cx, |tree, _| {
11167 assert_eq!(
11168 tree.entries(true, 0)
11169 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11170 .collect::<Vec<_>>(),
11171 vec![
11172 (rel_path(""), false),
11173 (rel_path(".gitignore"), false),
11174 (rel_path("src"), false),
11175 (rel_path("src/main.rs"), false),
11176 (rel_path("target"), true),
11177 (rel_path("target/debug"), true),
11178 (rel_path("target/debug/deps"), true),
11179 (rel_path("target/debug/foo.txt"), true),
11180 ]
11181 );
11182 });
11183
11184 assert_eq!(
11185 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11186 vec![
11187 RepositoryEvent::HeadChanged,
11188 RepositoryEvent::StatusesChanged,
11189 RepositoryEvent::StatusesChanged,
11190 ],
11191 "Initial worktree scan should produce a repo update event"
11192 );
11193 assert_eq!(
11194 project_events.lock().drain(..).collect::<Vec<_>>(),
11195 vec![
11196 ("target".to_string(), PathChange::Loaded),
11197 ("target/debug".to_string(), PathChange::Loaded),
11198 ("target/debug/deps".to_string(), PathChange::Loaded),
11199 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11200 ],
11201 "All non-ignored entries and all opened firs should be getting a project event",
11202 );
11203
11204 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11205 // This may happen multiple times during a single flycheck, but once is enough for testing.
11206 fs.emit_fs_event("/root/target/debug/deps", None);
11207 tree.flush_fs_events(cx).await;
11208 project
11209 .update(cx, |project, cx| project.git_scans_complete(cx))
11210 .await;
11211 cx.executor().run_until_parked();
11212
11213 assert_eq!(
11214 repository_updates
11215 .lock()
11216 .iter()
11217 .cloned()
11218 .collect::<Vec<_>>(),
11219 Vec::new(),
11220 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11221 );
11222 assert_eq!(
11223 project_events.lock().as_slice(),
11224 Vec::new(),
11225 "No further project events should happen, as only ignored dirs received FS events",
11226 );
11227}
11228
11229#[gpui::test]
11230async fn test_repos_in_invisible_worktrees(
11231 executor: BackgroundExecutor,
11232 cx: &mut gpui::TestAppContext,
11233) {
11234 init_test(cx);
11235 let fs = FakeFs::new(executor);
11236 fs.insert_tree(
11237 path!("/root"),
11238 json!({
11239 "dir1": {
11240 ".git": {},
11241 "dep1": {
11242 ".git": {},
11243 "src": {
11244 "a.txt": "",
11245 },
11246 },
11247 "b.txt": "",
11248 },
11249 }),
11250 )
11251 .await;
11252
11253 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
11254 let _visible_worktree =
11255 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11256 project
11257 .update(cx, |project, cx| project.git_scans_complete(cx))
11258 .await;
11259
11260 let repos = project.read_with(cx, |project, cx| {
11261 project
11262 .repositories(cx)
11263 .values()
11264 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11265 .collect::<Vec<_>>()
11266 });
11267 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11268
11269 let (_invisible_worktree, _) = project
11270 .update(cx, |project, cx| {
11271 project.worktree_store().update(cx, |worktree_store, cx| {
11272 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
11273 })
11274 })
11275 .await
11276 .expect("failed to create worktree");
11277 project
11278 .update(cx, |project, cx| project.git_scans_complete(cx))
11279 .await;
11280
11281 let repos = project.read_with(cx, |project, cx| {
11282 project
11283 .repositories(cx)
11284 .values()
11285 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11286 .collect::<Vec<_>>()
11287 });
11288 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11289}
11290
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so the `.git` dir and ignored entries are
    // visible to the worktree scanner.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so they can be
    // asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initially: the tracked file is clean; a file matched only by the
    // ancestor `.gitignore` (outside the worktree root) is not marked
    // ignored; a file under the repo's own ignored-dir is.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create three new files: one that also gets staged in the index, one
    // matching the ancestor gitignore, and one inside the ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file is Added, and the ignored states of
    // the new files mirror those of their pre-existing siblings.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is reported as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11431
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Lay out a "main" repository at /project that owns:
    // - a linked git worktree at /project/some-worktree, whose `.git` is a
    //   file pointing into the main repo's `.git/worktrees/...`, and
    // - a submodule at /project/subdir/some-submodule, whose `.git` file
    //   points into the main repo's `.git/modules/...`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) should be
    // discovered, each with its own working directory.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked-worktree repository, and that
    // repository should report /project (the main checkout) as its original
    // repo path while being flagged as a linked worktree.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for the repository's background scan to flush before asserting
    // on per-path status.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    // Unlike the linked worktree, the submodule's original repo path is its
    // own working directory, and it must not be treated as a linked worktree.
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11603
11604#[gpui::test]
11605async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
11606 init_test(cx);
11607 let fs = FakeFs::new(cx.background_executor.clone());
11608 fs.insert_tree(
11609 path!("/root"),
11610 json!({
11611 "project": {
11612 ".git": {},
11613 "child1": {
11614 "a.txt": "A",
11615 },
11616 "child2": {
11617 "b.txt": "B",
11618 }
11619 }
11620 }),
11621 )
11622 .await;
11623
11624 let project = Project::test(
11625 fs.clone(),
11626 [
11627 path!("/root/project/child1").as_ref(),
11628 path!("/root/project/child2").as_ref(),
11629 ],
11630 cx,
11631 )
11632 .await;
11633
11634 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11635 tree.flush_fs_events(cx).await;
11636 project
11637 .update(cx, |project, cx| project.git_scans_complete(cx))
11638 .await;
11639 cx.executor().run_until_parked();
11640
11641 let repos = project.read_with(cx, |project, cx| {
11642 project
11643 .repositories(cx)
11644 .values()
11645 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11646 .collect::<Vec<_>>()
11647 });
11648 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11649}
11650
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each layer (HEAD, index, buffer) and each file,
    // so the assertions below can tell exactly which base text a diff uses.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the buffer's contents so the buffer differs from both files
    // on disk; the diff base is what we're actually checking.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the rename, must likewise compare
    // against file_2's HEAD contents rather than file_1's.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11764
11765async fn search(
11766 project: &Entity<Project>,
11767 query: SearchQuery,
11768 cx: &mut gpui::TestAppContext,
11769) -> Result<HashMap<String, Vec<Range<usize>>>> {
11770 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11771 let mut results = HashMap::default();
11772 while let Ok(search_result) = search_rx.rx.recv().await {
11773 match search_result {
11774 SearchResult::Buffer { buffer, ranges } => {
11775 results.entry(buffer).or_insert(ranges);
11776 }
11777 SearchResult::LimitReached => {}
11778 }
11779 }
11780 Ok(results
11781 .into_iter()
11782 .map(|(buffer, ranges)| {
11783 buffer.update(cx, |buffer, cx| {
11784 let path = buffer
11785 .file()
11786 .unwrap()
11787 .full_path(cx)
11788 .to_string_lossy()
11789 .to_string();
11790 let ranges = ranges
11791 .into_iter()
11792 .map(|range| range.to_offset(buffer))
11793 .collect::<Vec<_>>();
11794 (path, ranges)
11795 })
11796 })
11797 .collect())
11798}
11799
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: the file is read as UTF-8 and the buffer is clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    // (0x48 0x69 read little-endian is the single code unit U+6948, "楈".)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo must restore both the UTF-8 encoding and the original text,
    // without marking the buffer dirty.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11863
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two independent worktrees, each with its own git repo and task file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Register the observer BEFORE creating the project, so every
    // `Repository` entity created during the initial scan is counted.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    // One repository per worktree should have been created during the scan.
    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
11934
11935pub fn init_test(cx: &mut gpui::TestAppContext) {
11936 zlog::init_test();
11937
11938 cx.update(|cx| {
11939 let settings_store = SettingsStore::test(cx);
11940 cx.set_global(settings_store);
11941 release_channel::init(semver::Version::new(0, 0, 0), cx);
11942 });
11943}
11944
11945fn json_lang() -> Arc<Language> {
11946 Arc::new(Language::new(
11947 LanguageConfig {
11948 name: "JSON".into(),
11949 matcher: LanguageMatcher {
11950 path_suffixes: vec!["json".to_string()],
11951 ..Default::default()
11952 },
11953 ..Default::default()
11954 },
11955 None,
11956 ))
11957}
11958
11959fn js_lang() -> Arc<Language> {
11960 Arc::new(Language::new(
11961 LanguageConfig {
11962 name: "JavaScript".into(),
11963 matcher: LanguageMatcher {
11964 path_suffixes: vec!["js".to_string()],
11965 ..Default::default()
11966 },
11967 ..Default::default()
11968 },
11969 None,
11970 ))
11971}
11972
/// Builds a fake "Python" language whose toolchain lister reports a virtual
/// environment for every ancestor directory (of the queried path) that
/// contains a `.venv` directory on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // Report one toolchain per ancestor of `subroot_relative_path`
            // (including itself) that contains a `.venv` directory.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution by path is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12045
12046fn typescript_lang() -> Arc<Language> {
12047 Arc::new(Language::new(
12048 LanguageConfig {
12049 name: "TypeScript".into(),
12050 matcher: LanguageMatcher {
12051 path_suffixes: vec!["ts".to_string()],
12052 ..Default::default()
12053 },
12054 ..Default::default()
12055 },
12056 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12057 ))
12058}
12059
12060fn tsx_lang() -> Arc<Language> {
12061 Arc::new(Language::new(
12062 LanguageConfig {
12063 name: "tsx".into(),
12064 matcher: LanguageMatcher {
12065 path_suffixes: vec!["tsx".to_string()],
12066 ..Default::default()
12067 },
12068 ..Default::default()
12069 },
12070 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12071 ))
12072}
12073
12074fn get_all_tasks(
12075 project: &Entity<Project>,
12076 task_contexts: Arc<TaskContexts>,
12077 cx: &mut App,
12078) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12079 let new_tasks = project.update(cx, |project, cx| {
12080 project.task_store().update(cx, |task_store, cx| {
12081 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12082 this.used_and_current_resolved_tasks(task_contexts, cx)
12083 })
12084 })
12085 });
12086
12087 cx.background_spawn(async move {
12088 let (mut old, new) = new_tasks.await;
12089 old.extend(new);
12090 old
12091 })
12092}
12093
12094#[track_caller]
12095fn assert_entry_git_state(
12096 tree: &Worktree,
12097 repository: &Repository,
12098 path: &str,
12099 index_status: Option<StatusCode>,
12100 is_ignored: bool,
12101) {
12102 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12103 let entry = tree
12104 .entry_for_path(&rel_path(path))
12105 .unwrap_or_else(|| panic!("entry {path} not found"));
12106 let status = repository
12107 .status_for_path(&repo_path(path))
12108 .map(|entry| entry.status);
12109 let expected = index_status.map(|index_status| {
12110 TrackedStatus {
12111 index_status,
12112 worktree_status: StatusCode::Unmodified,
12113 }
12114 .into()
12115 });
12116 assert_eq!(
12117 status, expected,
12118 "expected {path} to have git status: {expected:?}"
12119 );
12120 assert_eq!(
12121 entry.is_ignored, is_ignored,
12122 "expected {path} to have is_ignored: {is_ignored}"
12123 );
12124}
12125
12126#[track_caller]
12127fn git_init(path: &Path) -> git2::Repository {
12128 let mut init_opts = RepositoryInitOptions::new();
12129 init_opts.initial_head("main");
12130 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12131}
12132
12133#[track_caller]
12134fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12135 let path = path.as_ref();
12136 let mut index = repo.index().expect("Failed to get index");
12137 index.add_path(path).expect("Failed to add file");
12138 index.write().expect("Failed to write index");
12139}
12140
12141#[track_caller]
12142fn git_remove_index(path: &Path, repo: &git2::Repository) {
12143 let mut index = repo.index().expect("Failed to get index");
12144 index.remove_path(path).expect("Failed to add file");
12145 index.write().expect("Failed to write index");
12146}
12147
12148#[track_caller]
12149fn git_commit(msg: &'static str, repo: &git2::Repository) {
12150 use git2::Signature;
12151
12152 let signature = Signature::now("test", "test@zed.dev").unwrap();
12153 let oid = repo.index().unwrap().write_tree().unwrap();
12154 let tree = repo.find_tree(oid).unwrap();
12155 if let Ok(head) = repo.head() {
12156 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12157
12158 let parent_commit = parent_obj.as_commit().unwrap();
12159
12160 repo.commit(
12161 Some("HEAD"),
12162 &signature,
12163 &signature,
12164 msg,
12165 &tree,
12166 &[parent_commit],
12167 )
12168 .expect("Failed to commit with parent");
12169 } else {
12170 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12171 .expect("Failed to commit");
12172 }
12173}
12174
// Applies `commit` onto the current HEAD with default cherry-pick options.
// NOTE: compiled out via `#[cfg(any())]`; kept around for ad-hoc test use.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12180
12181#[track_caller]
12182fn git_stash(repo: &mut git2::Repository) {
12183 use git2::Signature;
12184
12185 let signature = Signature::now("test", "test@zed.dev").unwrap();
12186 repo.stash_save(&signature, "N/A", None)
12187 .expect("Failed to stash");
12188}
12189
12190#[track_caller]
12191fn git_reset(offset: usize, repo: &git2::Repository) {
12192 let head = repo.head().expect("Couldn't get repo head");
12193 let object = head.peel(git2::ObjectType::Commit).unwrap();
12194 let commit = object.as_commit().unwrap();
12195 let new_head = commit
12196 .parents()
12197 .inspect(|parnet| {
12198 parnet.message();
12199 })
12200 .nth(offset)
12201 .expect("Not enough history");
12202 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12203 .expect("Could not reset");
12204}
12205
// Creates branch `name` at the current HEAD commit without switching to it.
// NOTE: compiled out via `#[cfg(any())]`; kept around for ad-hoc test use.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the failure message previously said "Failed to commit", which
    // was copied from git_commit and misdescribed this operation.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12216
// Points HEAD at `name` and updates the working tree to match.
// NOTE: compiled out via `#[cfg(any())]`; kept around for ad-hoc test use.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12223
// Snapshots the repository's status entries, keyed by path.
// NOTE: compiled out via `#[cfg(any())]`; kept around for ad-hoc test use.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
12233
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id so the assertions below
    // can map absolute paths back to (worktree_id, relative path) pairs.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested files resolve with their full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12317
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories: /root/a and /root/b. The project opens three
    // worktrees — a, b/script (a subdirectory of repo b), and b itself.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // b/script and b share the same repository, so only two repos exist.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing b/script must not drop repo b, which is still covered by the
    // b worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree a should shift the active repository to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12430
// Covers the optimistic UI for staging a hunk: while the stage operation is
// still in flight the hunk must report `SecondaryHunkRemovalPending`, and once
// the operation completes it must report `NoSecondaryHunk`.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contains "two" while the working copy contains "TWO", producing
    // exactly one modified hunk on line 1 (0-based).
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Both HEAD and the index hold the committed contents, so the working-copy
    // edit starts out unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        // Exactly one hunk must exist throughout the operation.
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // Stage job not observed by the diff yet; keep ticking.
            HasSecondaryHunk => {}
            // The optimistic "pending" state we're polling for.
            SecondaryHunkRemovalPending => break,
            // Reaching fully-staged here means the optimistic state was skipped.
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // While staging is still in flight, the hunk must show the optimistic
    // pending state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12575
12576#[gpui::test]
12577async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12578 init_test(cx);
12579
12580 // Configure read_only_files setting
12581 cx.update(|cx| {
12582 cx.update_global::<SettingsStore, _>(|store, cx| {
12583 store.update_user_settings(cx, |settings| {
12584 settings.project.worktree.read_only_files = Some(vec![
12585 "**/generated/**".to_string(),
12586 "**/*.gen.rs".to_string(),
12587 ]);
12588 });
12589 });
12590 });
12591
12592 let fs = FakeFs::new(cx.background_executor.clone());
12593 fs.insert_tree(
12594 path!("/root"),
12595 json!({
12596 "src": {
12597 "main.rs": "fn main() {}",
12598 "types.gen.rs": "// Generated file",
12599 },
12600 "generated": {
12601 "schema.rs": "// Auto-generated schema",
12602 }
12603 }),
12604 )
12605 .await;
12606
12607 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12608
12609 // Open a regular file - should be read-write
12610 let regular_buffer = project
12611 .update(cx, |project, cx| {
12612 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12613 })
12614 .await
12615 .unwrap();
12616
12617 regular_buffer.read_with(cx, |buffer, _| {
12618 assert!(!buffer.read_only(), "Regular file should not be read-only");
12619 });
12620
12621 // Open a file matching *.gen.rs pattern - should be read-only
12622 let gen_buffer = project
12623 .update(cx, |project, cx| {
12624 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12625 })
12626 .await
12627 .unwrap();
12628
12629 gen_buffer.read_with(cx, |buffer, _| {
12630 assert!(
12631 buffer.read_only(),
12632 "File matching *.gen.rs pattern should be read-only"
12633 );
12634 });
12635
12636 // Open a file in generated directory - should be read-only
12637 let generated_buffer = project
12638 .update(cx, |project, cx| {
12639 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12640 })
12641 .await
12642 .unwrap();
12643
12644 generated_buffer.read_with(cx, |buffer, _| {
12645 assert!(
12646 buffer.read_only(),
12647 "File in generated directory should be read-only"
12648 );
12649 });
12650}
12651
12652#[gpui::test]
12653async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12654 init_test(cx);
12655
12656 // Explicitly set read_only_files to empty (default behavior)
12657 cx.update(|cx| {
12658 cx.update_global::<SettingsStore, _>(|store, cx| {
12659 store.update_user_settings(cx, |settings| {
12660 settings.project.worktree.read_only_files = Some(vec![]);
12661 });
12662 });
12663 });
12664
12665 let fs = FakeFs::new(cx.background_executor.clone());
12666 fs.insert_tree(
12667 path!("/root"),
12668 json!({
12669 "src": {
12670 "main.rs": "fn main() {}",
12671 },
12672 "generated": {
12673 "schema.rs": "// Auto-generated schema",
12674 }
12675 }),
12676 )
12677 .await;
12678
12679 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12680
12681 // All files should be read-write when read_only_files is empty
12682 let main_buffer = project
12683 .update(cx, |project, cx| {
12684 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12685 })
12686 .await
12687 .unwrap();
12688
12689 main_buffer.read_with(cx, |buffer, _| {
12690 assert!(
12691 !buffer.read_only(),
12692 "Files should not be read-only when read_only_files is empty"
12693 );
12694 });
12695
12696 let generated_buffer = project
12697 .update(cx, |project, cx| {
12698 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12699 })
12700 .await
12701 .unwrap();
12702
12703 generated_buffer.read_with(cx, |buffer, _| {
12704 assert!(
12705 !buffer.read_only(),
12706 "Generated files should not be read-only when read_only_files is empty"
12707 );
12708 });
12709}
12710
12711#[gpui::test]
12712async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12713 init_test(cx);
12714
12715 // Configure to make lock files read-only
12716 cx.update(|cx| {
12717 cx.update_global::<SettingsStore, _>(|store, cx| {
12718 store.update_user_settings(cx, |settings| {
12719 settings.project.worktree.read_only_files = Some(vec![
12720 "**/*.lock".to_string(),
12721 "**/package-lock.json".to_string(),
12722 ]);
12723 });
12724 });
12725 });
12726
12727 let fs = FakeFs::new(cx.background_executor.clone());
12728 fs.insert_tree(
12729 path!("/root"),
12730 json!({
12731 "Cargo.lock": "# Lock file",
12732 "Cargo.toml": "[package]",
12733 "package-lock.json": "{}",
12734 "package.json": "{}",
12735 }),
12736 )
12737 .await;
12738
12739 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12740
12741 // Cargo.lock should be read-only
12742 let cargo_lock = project
12743 .update(cx, |project, cx| {
12744 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12745 })
12746 .await
12747 .unwrap();
12748
12749 cargo_lock.read_with(cx, |buffer, _| {
12750 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12751 });
12752
12753 // Cargo.toml should be read-write
12754 let cargo_toml = project
12755 .update(cx, |project, cx| {
12756 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12757 })
12758 .await
12759 .unwrap();
12760
12761 cargo_toml.read_with(cx, |buffer, _| {
12762 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12763 });
12764
12765 // package-lock.json should be read-only
12766 let package_lock = project
12767 .update(cx, |project, cx| {
12768 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12769 })
12770 .await
12771 .unwrap();
12772
12773 package_lock.read_with(cx, |buffer, _| {
12774 assert!(buffer.read_only(), "package-lock.json should be read-only");
12775 });
12776
12777 // package.json should be read-write
12778 let package_json = project
12779 .update(cx, |project, cx| {
12780 project.open_local_buffer(path!("/root/package.json"), cx)
12781 })
12782 .await
12783 .unwrap();
12784
12785 package_json.read_with(cx, |buffer, _| {
12786 assert!(!buffer.read_only(), "package.json should not be read-only");
12787 });
12788}
12789
12790mod disable_ai_settings_tests {
12791 use gpui::TestAppContext;
12792 use project::*;
12793 use settings::{Settings, SettingsStore};
12794
12795 #[gpui::test]
12796 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12797 cx.update(|cx| {
12798 settings::init(cx);
12799
12800 // Test 1: Default is false (AI enabled)
12801 assert!(
12802 !DisableAiSettings::get_global(cx).disable_ai,
12803 "Default should allow AI"
12804 );
12805 });
12806
12807 let disable_true = serde_json::json!({
12808 "disable_ai": true
12809 })
12810 .to_string();
12811 let disable_false = serde_json::json!({
12812 "disable_ai": false
12813 })
12814 .to_string();
12815
12816 cx.update_global::<SettingsStore, _>(|store, cx| {
12817 store.set_user_settings(&disable_false, cx).unwrap();
12818 store.set_global_settings(&disable_true, cx).unwrap();
12819 });
12820 cx.update(|cx| {
12821 assert!(
12822 DisableAiSettings::get_global(cx).disable_ai,
12823 "Local false cannot override global true"
12824 );
12825 });
12826
12827 cx.update_global::<SettingsStore, _>(|store, cx| {
12828 store.set_global_settings(&disable_false, cx).unwrap();
12829 store.set_user_settings(&disable_true, cx).unwrap();
12830 });
12831
12832 cx.update(|cx| {
12833 assert!(
12834 DisableAiSettings::get_global(cx).disable_ai,
12835 "Local false cannot override global true"
12836 );
12837 });
12838 }
12839
12840 #[gpui::test]
12841 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12842 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12843 use worktree::WorktreeId;
12844
12845 cx.update(|cx| {
12846 settings::init(cx);
12847
12848 // Default should allow AI
12849 assert!(
12850 !DisableAiSettings::get_global(cx).disable_ai,
12851 "Default should allow AI"
12852 );
12853 });
12854
12855 let worktree_id = WorktreeId::from_usize(1);
12856 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12857 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12858 };
12859 let project_path = rel_path("project");
12860 let settings_location = SettingsLocation {
12861 worktree_id,
12862 path: project_path.as_ref(),
12863 };
12864
12865 // Test: Project-level disable_ai=true should disable AI for files in that project
12866 cx.update_global::<SettingsStore, _>(|store, cx| {
12867 store
12868 .set_local_settings(
12869 worktree_id,
12870 LocalSettingsPath::InWorktree(project_path.clone()),
12871 LocalSettingsKind::Settings,
12872 Some(r#"{ "disable_ai": true }"#),
12873 cx,
12874 )
12875 .unwrap();
12876 });
12877
12878 cx.update(|cx| {
12879 let settings = DisableAiSettings::get(Some(settings_location), cx);
12880 assert!(
12881 settings.disable_ai,
12882 "Project-level disable_ai=true should disable AI for files in that project"
12883 );
12884 // Global should now also be true since project-level disable_ai is merged into global
12885 assert!(
12886 DisableAiSettings::get_global(cx).disable_ai,
12887 "Global setting should be affected by project-level disable_ai=true"
12888 );
12889 });
12890
12891 // Test: Setting project-level to false should allow AI for that project
12892 cx.update_global::<SettingsStore, _>(|store, cx| {
12893 store
12894 .set_local_settings(
12895 worktree_id,
12896 LocalSettingsPath::InWorktree(project_path.clone()),
12897 LocalSettingsKind::Settings,
12898 Some(r#"{ "disable_ai": false }"#),
12899 cx,
12900 )
12901 .unwrap();
12902 });
12903
12904 cx.update(|cx| {
12905 let settings = DisableAiSettings::get(Some(settings_location), cx);
12906 assert!(
12907 !settings.disable_ai,
12908 "Project-level disable_ai=false should allow AI"
12909 );
12910 // Global should also be false now
12911 assert!(
12912 !DisableAiSettings::get_global(cx).disable_ai,
12913 "Global setting should be false when project-level is false"
12914 );
12915 });
12916
12917 // Test: User-level true + project-level false = AI disabled (saturation)
12918 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12919 cx.update_global::<SettingsStore, _>(|store, cx| {
12920 store.set_user_settings(&disable_true, cx).unwrap();
12921 store
12922 .set_local_settings(
12923 worktree_id,
12924 LocalSettingsPath::InWorktree(project_path.clone()),
12925 LocalSettingsKind::Settings,
12926 Some(r#"{ "disable_ai": false }"#),
12927 cx,
12928 )
12929 .unwrap();
12930 });
12931
12932 cx.update(|cx| {
12933 let settings = DisableAiSettings::get(Some(settings_location), cx);
12934 assert!(
12935 settings.disable_ai,
12936 "Project-level false cannot override user-level true (SaturatingBool)"
12937 );
12938 });
12939 }
12940}