1#![allow(clippy::format_collect)]
2
3mod bookmark_store;
4mod color_extractor;
5mod context_server_store;
6mod debugger;
7mod ext_agent_tests;
8mod extension_agent_tests;
9mod git_store;
10mod image_store;
11mod lsp_command;
12mod lsp_store;
13mod manifest_tree;
14mod project_search;
15mod search;
16mod search_history;
17mod signature_help;
18mod task_inventory;
19mod trusted_worktrees;
20mod yarn;
21
22use anyhow::Result;
23use async_trait::async_trait;
24use buffer_diff::{
25 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
26 assert_hunks,
27};
28use collections::{BTreeSet, HashMap, HashSet};
29use encoding_rs;
30use fs::{FakeFs, PathEventKind};
31use futures::{StreamExt, future};
32use git::{
33 GitHostingProviderRegistry,
34 repository::{RepoPath, repo_path},
35 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
36};
37use git2::RepositoryInitOptions;
38use gpui::{
39 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
40 TestAppContext, UpdateGlobal,
41};
42use itertools::Itertools;
43use language::{
44 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
45 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
46 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
47 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
48 ToolchainMetadata,
49 language_settings::{Formatter, FormatterList, LanguageSettings, LanguageSettingsContent},
50 markdown_lang, rust_lang, tree_sitter_typescript,
51};
52use lsp::{
53 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
54 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
55 Uri, WillRenameFiles, notification::DidRenameFiles,
56};
57use parking_lot::Mutex;
58use paths::{config_dir, global_gitignore_path, tasks_file};
59use postage::stream::Stream as _;
60use pretty_assertions::{assert_eq, assert_matches};
61use project::{
62 Event, TaskContexts,
63 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
64 search::{SearchQuery, SearchResult},
65 task_store::{TaskSettingsLocation, TaskStore},
66 *,
67};
68use rand::{Rng as _, rngs::StdRng};
69use serde_json::json;
70use settings::SettingsStore;
71#[cfg(not(windows))]
72use std::os;
73use std::{
74 cell::RefCell,
75 env, mem,
76 num::NonZeroU32,
77 ops::Range,
78 path::{Path, PathBuf},
79 rc::Rc,
80 str::FromStr,
81 sync::{Arc, OnceLock, atomic},
82 task::Poll,
83 time::Duration,
84};
85use sum_tree::SumTree;
86use task::{ResolvedTask, ShellKind, TaskContext};
87use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
88use unindent::Unindent as _;
89use util::{
90 TryFutureExt as _, assert_set_eq, maybe, path,
91 paths::{PathMatcher, PathStyle},
92 rel_path::{RelPath, rel_path},
93 test::{TempTree, marked_text_offsets},
94 uri,
95};
96use worktree::WorktreeModelHandle as _;
97
98#[gpui::test]
99async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
100 cx.executor().allow_parking();
101
102 let (tx, mut rx) = futures::channel::mpsc::unbounded();
103 let _thread = std::thread::spawn(move || {
104 #[cfg(not(target_os = "windows"))]
105 std::fs::metadata("/tmp").unwrap();
106 #[cfg(target_os = "windows")]
107 std::fs::metadata("C:/Windows").unwrap();
108 std::thread::sleep(Duration::from_millis(1000));
109 tx.unbounded_send(1).unwrap();
110 });
111 rx.next().await.unwrap();
112}
113
114#[gpui::test]
115async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
116 cx.executor().allow_parking();
117
118 let io_task = smol::unblock(move || {
119 println!("sleeping on thread {:?}", std::thread::current().id());
120 std::thread::sleep(Duration::from_millis(10));
121 1
122 });
123
124 let task = cx.foreground_executor().spawn(async move {
125 io_task.await;
126 });
127
128 task.await;
129}
130
// When a project is opened with both a single-file worktree and a directory
// worktree, the default session working directories should list the directory
// worktree first, followed by the single file's parent directory.
#[gpui::test]
async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir-project": {
                "src": {
                    "main.rs": "fn main() {}"
                }
            },
            "single-file.rs": "fn helper() {}"
        }),
    )
    .await;

    // The single-file worktree is deliberately passed first, so the assertion
    // below shows the directory worktree wins regardless of insertion order.
    let project = Project::test(
        fs,
        [
            Path::new(path!("/root/single-file.rs")),
            Path::new(path!("/root/dir-project")),
        ],
        cx,
    )
    .await;

    let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
    let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();

    // "/root" is the parent directory of the single-file worktree.
    assert_eq!(
        ordered_paths,
        vec![
            PathBuf::from(path!("/root/dir-project")),
            PathBuf::from(path!("/root")),
        ]
    );
}
172
173#[gpui::test]
174async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
175 cx: &mut gpui::TestAppContext,
176) {
177 init_test(cx);
178
179 let fs = FakeFs::new(cx.executor());
180 let project = Project::test(fs, [], cx).await;
181
182 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
183 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
184
185 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
186}
187
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
//
// Opens a project through a symlinked root and verifies that symlinked
// directories inside the tree are traversed and share inodes with their
// targets.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // "root_link" points at "root"; "root/finnochio" points at "root/fennel".
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Use the real filesystem here, since FakeFs is not involved in creating
    // the symlinks above.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, and grape reached via both fennel and finnochio.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory's file resolves to the same inode as the
        // original.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
238
// Verifies .editorconfig handling inside a worktree: .editorconfig values
// override .zed/settings.json, nested .editorconfig files override the root
// one, `tab_width` is used when `indent_size` is absent, and `off` values fall
// back to the Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    // Opens the buffer at `path` and resolves its effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_a = settings_for("a.rs", cx).await;
    let settings_b = settings_for("b/b.rs", cx).await;
    let settings_c = settings_for("c.js", cx).await;
    let settings_d = settings_for("d/d.rs", cx).await;
    let settings_readme = settings_for("README.json", cx).await;
    // .editorconfig overrides .zed/settings
    assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
    assert_eq!(settings_a.hard_tabs, true);
    assert_eq!(settings_a.ensure_final_newline_on_save, true);
    assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
    assert_eq!(settings_a.preferred_line_length, 120);

    // .editorconfig in b/ overrides .editorconfig in root
    assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

    // .editorconfig in subdirectory overrides .editorconfig in root
    assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

    // "indent_size" is not set, so "tab_width" is used
    assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

    // When max_line_length is "off", default to .zed/settings.json
    assert_eq!(settings_b.preferred_line_length, 64);
    assert_eq!(settings_c.preferred_line_length, 64);

    // README.json should not be affected by .editorconfig's glob "*.rs",
    // so it keeps tab_size = 8 from .zed/settings.json
    assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
}
339
// External .editorconfig files in ancestor directories outside the worktree
// contribute settings: nearer ancestors win over farther ones, and the
// worktree's own .editorconfig wins over both for patterns it matches.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // Only "worktree" is opened; the two .editorconfig files above it are
    // "external" configs.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Opens the buffer at `path` and resolves its effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
394
// A `root = true` .editorconfig inside the worktree stops the upward search:
// files under src/ must never see the worktree-root config's indent_size = 99.
#[gpui::test]
async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "src": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
        // src/file.rs gets indent_size = 2 from src's root config, NOT 99.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
432
// A `root = true` .editorconfig at the worktree root stops traversal into
// external ancestor directories: the parent's config must be ignored.
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
473
// A `root = true` .editorconfig in an external ancestor stops traversal there:
// its settings apply, but farther ancestors (the grandparent) are ignored.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
516
// Two sibling worktrees that share an external parent .editorconfig should
// both receive its settings.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
572
573#[gpui::test]
574async fn test_external_editorconfig_not_loaded_without_internal_config(
575 cx: &mut gpui::TestAppContext,
576) {
577 init_test(cx);
578
579 let fs = FakeFs::new(cx.executor());
580 fs.insert_tree(
581 path!("/parent"),
582 json!({
583 ".editorconfig": "[*]\nindent_size = 99\n",
584 "worktree": {
585 "file.rs": "fn main() {}",
586 }
587 }),
588 )
589 .await;
590
591 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
592
593 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
594 language_registry.add(rust_lang());
595
596 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
597
598 cx.executor().run_until_parked();
599
600 let buffer = project
601 .update(cx, |project, cx| {
602 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
603 })
604 .await
605 .unwrap();
606
607 cx.update(|cx| {
608 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
609
610 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
611 // because without an internal .editorconfig, external configs are not loaded
612 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
613 });
614}
615
// Editing an external .editorconfig on disk should be observed (it is watched)
// and cause the effective settings for open worktrees to refresh.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                // An internal .editorconfig (even an empty section) is required
                // for external configs to be loaded at all.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
679
// A worktree added after project creation should also discover and apply
// external .editorconfig files from its ancestors.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Open only the existing worktree to begin with.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the second worktree to the already-running project.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
746
// Removing the only worktree that references an external .editorconfig should
// drop the per-worktree state, the cached external config, and its watcher.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internals via its test-only accessor.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
802
// When two worktrees share one external .editorconfig, removing one worktree
// must NOT drop the shared config or its watcher while the other still uses it.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
895
// Git hosting providers declared in .zed/settings.json should register in the
// global GitHostingProviderRegistry, and be unregistered again when removed
// from the settings file.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The "foo" provider from project settings should now be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk; the provider should be removed.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
960
// Verifies per-directory project settings and tasks: nested .zed/settings.json
// overrides the root one for buffers beneath it, nested .zed/tasks.json tasks
// are surfaced alongside root ones, and global (file-based) tasks plus
// recently-scheduled ordering are reflected in the resolved task list.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Make task resolution treat this worktree as the active one.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // a/ inherits the root settings; b/ is overridden by b/.zed/settings.json.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    // Collect (source, label, args, env) for every resolvable task.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and register a global file-based task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // "cargo check all" now sorts first (it was just scheduled via
    // `task_scheduled` above), and the new global task appears last with its
    // env applied.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1163
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we have a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Shared flag flipped by the event subscription below once the toast is seen.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast must name the unknown variable and link to the
                // tasks documentation.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1221
1222#[gpui::test]
1223async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1224 init_test(cx);
1225 TaskStore::init(None);
1226
1227 let fs = FakeFs::new(cx.executor());
1228 fs.insert_tree(
1229 path!("/dir"),
1230 json!({
1231 ".zed": {
1232 "tasks.json": r#"[{
1233 "label": "test worktree root",
1234 "command": "echo $ZED_WORKTREE_ROOT"
1235 }]"#,
1236 },
1237 "a": {
1238 "a.rs": "fn a() {\n A\n}"
1239 },
1240 }),
1241 )
1242 .await;
1243
1244 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1245 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1246
1247 cx.executor().run_until_parked();
1248 let worktree_id = cx.update(|cx| {
1249 project.update(cx, |project, cx| {
1250 project.worktrees(cx).next().unwrap().read(cx).id()
1251 })
1252 });
1253
1254 let active_non_worktree_item_tasks = cx
1255 .update(|cx| {
1256 get_all_tasks(
1257 &project,
1258 Arc::new(TaskContexts {
1259 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1260 active_worktree_context: None,
1261 other_worktree_contexts: Vec::new(),
1262 lsp_task_sources: HashMap::default(),
1263 latest_selection: None,
1264 }),
1265 cx,
1266 )
1267 })
1268 .await;
1269 assert!(
1270 active_non_worktree_item_tasks.is_empty(),
1271 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1272 );
1273
1274 let active_worktree_tasks = cx
1275 .update(|cx| {
1276 get_all_tasks(
1277 &project,
1278 Arc::new(TaskContexts {
1279 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1280 active_worktree_context: Some((worktree_id, {
1281 let mut worktree_context = TaskContext::default();
1282 worktree_context
1283 .task_variables
1284 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1285 worktree_context
1286 })),
1287 other_worktree_contexts: Vec::new(),
1288 lsp_task_sources: HashMap::default(),
1289 latest_selection: None,
1290 }),
1291 cx,
1292 )
1293 })
1294 .await;
1295 assert_eq!(
1296 active_worktree_tasks
1297 .into_iter()
1298 .map(|(source_kind, task)| {
1299 let resolved = task.resolved;
1300 (source_kind, resolved.command.unwrap())
1301 })
1302 .collect::<Vec<_>>(),
1303 vec![(
1304 TaskSourceKind::Worktree {
1305 id: worktree_id,
1306 directory_in_worktree: rel_path(".zed").into(),
1307 id_base: "local worktree tasks from directory \".zed\"".into(),
1308 },
1309 "echo /dir".to_string(),
1310 )]
1311 );
1312}
1313
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider mimicking Python project rooting: a project is
    // rooted at a `pyproject.toml`, and a workspace lockfile promotes an
    // ancestor to be the (outermost) workspace root.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walks up to `depth` ancestors of `path`. Returns the outermost
        // ancestor that has both `pyproject.toml` and a known lockfile, or
        // falls back to the innermost ancestor with a `pyproject.toml`.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            const WORKSPACE_LOCKFILES: &[&str] =
                &["uv.lock", "poetry.lock", "pdm.lock", "Pipfile.lock"];

            let mut innermost_pyproject = None;
            let mut outermost_workspace_root = None;

            for path in path.ancestors().take(depth) {
                let pyproject_path = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&pyproject_path, Some(false)) {
                    // First hit while walking outward is the innermost one.
                    if innermost_pyproject.is_none() {
                        innermost_pyproject = Some(Arc::from(path));
                    }

                    let has_lockfile = WORKSPACE_LOCKFILES.iter().any(|lockfile| {
                        let lockfile_path = path.join(rel_path(lockfile));
                        delegate.exists(&lockfile_path, Some(false))
                    });
                    if has_lockfile {
                        // Keep overwriting so the *outermost* match wins.
                        outermost_workspace_root = Some(Arc::from(path));
                    }
                }
            }

            outermost_workspace_root.or(innermost_pyproject)
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling Python projects inside one worktree, each with its own
    // `pyproject.toml` and virtual environment.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots project-b on its own `pyproject.toml`.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1531
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end lifecycle test for language servers: startup on buffer open,
    // capability-driven buffer configuration, change/save/close notification
    // routing per-language, renames (including extension changes that move a
    // buffer between servers), and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server hears about test2.rs — the toml edit never reaches it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename surfaces as close-then-open on the same server.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1934
1935#[gpui::test]
1936async fn test_late_lsp_adapter_registration(cx: &mut gpui::TestAppContext) {
1937 init_test(cx);
1938
1939 let fs = FakeFs::new(cx.executor());
1940 fs.insert_tree(
1941 path!("/dir"),
1942 json!({
1943 "test.rs": "const A: i32 = 1;",
1944 }),
1945 )
1946 .await;
1947
1948 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1949 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1950
1951 // Add the language first so the buffer gets assigned a language.
1952 language_registry.add(rust_lang());
1953 cx.executor().run_until_parked();
1954
1955 // Open a buffer — it gets assigned the Rust language but there is no LSP adapter yet.
1956 let (rust_buffer, _handle) = project
1957 .update(cx, |project, cx| {
1958 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
1959 })
1960 .await
1961 .unwrap();
1962
1963 rust_buffer.update(cx, |buffer, _| {
1964 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
1965 });
1966
1967 // Now register the LSP adapter late (simulating an extension loading after startup).
1968 let mut fake_rust_servers = language_registry.register_fake_lsp(
1969 "Rust",
1970 FakeLspAdapter {
1971 name: "the-rust-language-server",
1972 capabilities: lsp::ServerCapabilities {
1973 completion_provider: Some(lsp::CompletionOptions {
1974 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
1975 ..Default::default()
1976 }),
1977 ..Default::default()
1978 },
1979 ..Default::default()
1980 },
1981 );
1982 cx.executor().run_until_parked();
1983
1984 // The language server should start and receive a DidOpenTextDocument notification
1985 // for the already-open buffer.
1986 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1987 assert_eq!(
1988 fake_rust_server
1989 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1990 .await
1991 .text_document,
1992 lsp::TextDocumentItem {
1993 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1994 version: 0,
1995 text: "const A: i32 = 1;".to_string(),
1996 language_id: "rust".to_string(),
1997 }
1998 );
1999
2000 // The buffer should be configured with the language server's capabilities.
2001 rust_buffer.update(cx, |buffer, _| {
2002 assert_eq!(
2003 buffer
2004 .completion_triggers()
2005 .iter()
2006 .cloned()
2007 .collect::<Vec<_>>(),
2008 &[".".to_string(), "::".to_string()]
2009 );
2010 });
2011}
2012
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    // Verifies how server binary paths from settings are resolved: a path that
    // exists inside the worktree is treated as worktree-relative; one that
    // doesn't is passed through unchanged (to be found via PATH).
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The existing relative path is anchored at the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The missing one is left as-is, for PATH-based lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2096
2097#[gpui::test]
2098async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2099 init_test(cx);
2100
2101 let settings_json_contents = json!({
2102 "languages": {
2103 "Rust": {
2104 "language_servers": ["tilde_lsp"]
2105 }
2106 },
2107 "lsp": {
2108 "tilde_lsp": {
2109 "binary": {
2110 "path": "~/.local/bin/rust-analyzer",
2111 }
2112 }
2113 },
2114 });
2115
2116 let fs = FakeFs::new(cx.executor());
2117 fs.insert_tree(
2118 path!("/root"),
2119 json!({
2120 ".zed": {
2121 "settings.json": settings_json_contents.to_string(),
2122 },
2123 "src": {
2124 "main.rs": "fn main() {}",
2125 }
2126 }),
2127 )
2128 .await;
2129
2130 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2131 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2132 language_registry.add(rust_lang());
2133
2134 let mut tilde_lsp = language_registry.register_fake_lsp(
2135 "Rust",
2136 FakeLspAdapter {
2137 name: "tilde_lsp",
2138 ..Default::default()
2139 },
2140 );
2141 cx.run_until_parked();
2142
2143 project
2144 .update(cx, |project, cx| {
2145 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2146 })
2147 .await
2148 .unwrap();
2149
2150 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2151 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2152 assert_eq!(
2153 lsp_path, expected_path,
2154 "Tilde path should expand to home directory"
2155 );
2156}
2157
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    // A `Rescan` fs event on a watched path should reach the language server
    // as a plain CHANGED file event.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server registers a watcher for `Cargo.lock` only.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    // Accumulate every change notification the server receives.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before we emit one.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces as a single CHANGED event for the watched file.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2248
// End-to-end test of file-watch registration and FS event reporting to a language
// server across several roots:
// - the visible worktree (/the-root), including a gitignored `target` directory,
// - an out-of-worktree dependency opened via the LSP (/the-registry),
// - a watched directory tree outside any worktree (/the/stdlib).
// Verifies which paths get watched, that ignored subtrees are only loaded once the
// server asks to watch inside them, and that only mutations matching the registered
// globs are reported.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure how many extra directory
    // scans the watch registration below causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/Cargo.toml").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/src/*.{rs,c}").to_string(),
                                    ),
                                    kind: None,
                                },
                                // Watches inside the gitignored `target` dir — this
                                // should force `target/y` to be loaded recursively.
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/target/y/**/*.rs").to_string(),
                                    ),
                                    kind: None,
                                },
                                // Watches a tree entirely outside the project's worktrees.
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the/stdlib/src/**/*.rs").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("**/Cargo.lock").to_string(),
                                    ),
                                    kind: None,
                                },
                            ],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registration alone must not report any events.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Exclude watches unrelated to this test (settings dir, global gitignore).
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this path says "/the-stdlib" but the tree above was created at
    // "/the/stdlib" — presumably it's intended as a non-matching mutation either way,
    // but confirm this hyphen/slash difference isn't a typo.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
2555
2556#[gpui::test]
2557async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2558 init_test(cx);
2559
2560 let fs = FakeFs::new(cx.executor());
2561 fs.insert_tree(
2562 path!("/dir"),
2563 json!({
2564 "a.rs": "let a = 1;",
2565 "b.rs": "let b = 2;"
2566 }),
2567 )
2568 .await;
2569
2570 let project = Project::test(
2571 fs,
2572 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2573 cx,
2574 )
2575 .await;
2576 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2577
2578 let buffer_a = project
2579 .update(cx, |project, cx| {
2580 project.open_local_buffer(path!("/dir/a.rs"), cx)
2581 })
2582 .await
2583 .unwrap();
2584 let buffer_b = project
2585 .update(cx, |project, cx| {
2586 project.open_local_buffer(path!("/dir/b.rs"), cx)
2587 })
2588 .await
2589 .unwrap();
2590
2591 lsp_store.update(cx, |lsp_store, cx| {
2592 lsp_store
2593 .update_diagnostics(
2594 LanguageServerId(0),
2595 lsp::PublishDiagnosticsParams {
2596 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2597 version: None,
2598 diagnostics: vec![lsp::Diagnostic {
2599 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2600 severity: Some(lsp::DiagnosticSeverity::ERROR),
2601 message: "error 1".to_string(),
2602 ..Default::default()
2603 }],
2604 },
2605 None,
2606 DiagnosticSourceKind::Pushed,
2607 &[],
2608 cx,
2609 )
2610 .unwrap();
2611 lsp_store
2612 .update_diagnostics(
2613 LanguageServerId(0),
2614 lsp::PublishDiagnosticsParams {
2615 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2616 version: None,
2617 diagnostics: vec![lsp::Diagnostic {
2618 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2619 severity: Some(DiagnosticSeverity::WARNING),
2620 message: "error 2".to_string(),
2621 ..Default::default()
2622 }],
2623 },
2624 None,
2625 DiagnosticSourceKind::Pushed,
2626 &[],
2627 cx,
2628 )
2629 .unwrap();
2630 });
2631
2632 buffer_a.update(cx, |buffer, _| {
2633 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2634 assert_eq!(
2635 chunks
2636 .iter()
2637 .map(|(s, d)| (s.as_str(), *d))
2638 .collect::<Vec<_>>(),
2639 &[
2640 ("let ", None),
2641 ("a", Some(DiagnosticSeverity::ERROR)),
2642 (" = 1;", None),
2643 ]
2644 );
2645 });
2646 buffer_b.update(cx, |buffer, _| {
2647 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2648 assert_eq!(
2649 chunks
2650 .iter()
2651 .map(|(s, d)| (s.as_str(), *d))
2652 .collect::<Vec<_>>(),
2653 &[
2654 ("let ", None),
2655 ("b", Some(DiagnosticSeverity::WARNING)),
2656 (" = 2;", None),
2657 ]
2658 );
2659 });
2660}
2661
2662#[gpui::test]
2663async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2664 init_test(cx);
2665
2666 let fs = FakeFs::new(cx.executor());
2667 fs.insert_tree(
2668 path!("/root"),
2669 json!({
2670 "dir": {
2671 ".git": {
2672 "HEAD": "ref: refs/heads/main",
2673 },
2674 ".gitignore": "b.rs",
2675 "a.rs": "let a = 1;",
2676 "b.rs": "let b = 2;",
2677 },
2678 "other.rs": "let b = c;"
2679 }),
2680 )
2681 .await;
2682
2683 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2684 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2685 let (worktree, _) = project
2686 .update(cx, |project, cx| {
2687 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2688 })
2689 .await
2690 .unwrap();
2691 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2692
2693 let (worktree, _) = project
2694 .update(cx, |project, cx| {
2695 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2696 })
2697 .await
2698 .unwrap();
2699 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2700
2701 let server_id = LanguageServerId(0);
2702 lsp_store.update(cx, |lsp_store, cx| {
2703 lsp_store
2704 .update_diagnostics(
2705 server_id,
2706 lsp::PublishDiagnosticsParams {
2707 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2708 version: None,
2709 diagnostics: vec![lsp::Diagnostic {
2710 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2711 severity: Some(lsp::DiagnosticSeverity::ERROR),
2712 message: "unused variable 'b'".to_string(),
2713 ..Default::default()
2714 }],
2715 },
2716 None,
2717 DiagnosticSourceKind::Pushed,
2718 &[],
2719 cx,
2720 )
2721 .unwrap();
2722 lsp_store
2723 .update_diagnostics(
2724 server_id,
2725 lsp::PublishDiagnosticsParams {
2726 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2727 version: None,
2728 diagnostics: vec![lsp::Diagnostic {
2729 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2730 severity: Some(lsp::DiagnosticSeverity::ERROR),
2731 message: "unknown variable 'c'".to_string(),
2732 ..Default::default()
2733 }],
2734 },
2735 None,
2736 DiagnosticSourceKind::Pushed,
2737 &[],
2738 cx,
2739 )
2740 .unwrap();
2741 });
2742
2743 let main_ignored_buffer = project
2744 .update(cx, |project, cx| {
2745 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2746 })
2747 .await
2748 .unwrap();
2749 main_ignored_buffer.update(cx, |buffer, _| {
2750 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2751 assert_eq!(
2752 chunks
2753 .iter()
2754 .map(|(s, d)| (s.as_str(), *d))
2755 .collect::<Vec<_>>(),
2756 &[
2757 ("let ", None),
2758 ("b", Some(DiagnosticSeverity::ERROR)),
2759 (" = 2;", None),
2760 ],
2761 "Gigitnored buffers should still get in-buffer diagnostics",
2762 );
2763 });
2764 let other_buffer = project
2765 .update(cx, |project, cx| {
2766 project.open_buffer((other_worktree_id, rel_path("")), cx)
2767 })
2768 .await
2769 .unwrap();
2770 other_buffer.update(cx, |buffer, _| {
2771 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2772 assert_eq!(
2773 chunks
2774 .iter()
2775 .map(|(s, d)| (s.as_str(), *d))
2776 .collect::<Vec<_>>(),
2777 &[
2778 ("let b = ", None),
2779 ("c", Some(DiagnosticSeverity::ERROR)),
2780 (";", None),
2781 ],
2782 "Buffers from hidden projects should still get in-buffer diagnostics"
2783 );
2784 });
2785
2786 project.update(cx, |project, cx| {
2787 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2788 assert_eq!(
2789 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2790 vec![(
2791 ProjectPath {
2792 worktree_id: main_worktree_id,
2793 path: rel_path("b.rs").into(),
2794 },
2795 server_id,
2796 DiagnosticSummary {
2797 error_count: 1,
2798 warning_count: 0,
2799 }
2800 )]
2801 );
2802 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2803 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2804 });
2805}
2806
// Verifies the project-level event stream produced while a server runs disk-based
// diagnostics: started/updated/finished events fire in order, diagnostics land in
// the buffer, and re-publishing an identical empty set does not emit a second
// redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // The adapter recognizes this token as the disk-based-diagnostics progress.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token surfaces a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for a.rs while the progress is still running.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is attached to the buffer when it's opened later.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second identical empty publish: no further event should be queued.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2942
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open doesn't leave the project stuck in the "diagnosing"
// state: the replacement server's progress lifecycle fully supersedes the old one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Old server (id 0) is removed, replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be counted as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
3044
// Verifies that restarting a language server clears the diagnostics it previously
// published, both from the buffer and from the project-level summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3125
3126#[gpui::test]
3127async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3128 init_test(cx);
3129
3130 let fs = FakeFs::new(cx.executor());
3131 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3132
3133 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3134 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3135
3136 language_registry.add(rust_lang());
3137 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3138
3139 let (buffer, _handle) = project
3140 .update(cx, |project, cx| {
3141 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3142 })
3143 .await
3144 .unwrap();
3145
3146 // Before restarting the server, report diagnostics with an unknown buffer version.
3147 let fake_server = fake_servers.next().await.unwrap();
3148 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3149 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3150 version: Some(10000),
3151 diagnostics: Vec::new(),
3152 });
3153 cx.executor().run_until_parked();
3154 project.update(cx, |project, cx| {
3155 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3156 });
3157
3158 let mut fake_server = fake_servers.next().await.unwrap();
3159 let notification = fake_server
3160 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3161 .await
3162 .text_document;
3163 assert_eq!(notification.version, 0);
3164}
3165
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification for the cancellable progress
// token only — the non-cancellable token must not be cancelled.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress: explicitly NOT cancellable — must survive the cancel below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second progress: cancellable — the one the cancel request should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3237
// Verifies that toggling the per-language `enable_language_server` setting stops
// and restarts exactly the affected server: disabling Rust stops only the Rust
// server; re-enabling Rust while disabling JavaScript restarts the former and
// stops the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server reopens the Rust buffer from scratch.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3355
// Verifies that diagnostics published against an *older* LSP document version
// are transformed through the buffer edits made since that version, so their
// ranges track the text they were originally attached to. Also covers
// overlapping diagnostics and out-of-order (by severity/position) batches.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source so entries
    // carrying that source are flagged `is_disk_based` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    // (i.e. *before* the "\n\n" edit above — the ranges use the old rows).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // NOTE(review): the asserted `group_id` values track ingestion order of
    // the published diagnostics — confirm against the DiagnosticSet grouping
    // logic if these assertions start failing.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk boundaries fall exactly at diagnostic edges.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips diagnostics at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // A warning whose range contains the error above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins the
        // chunk highlight; the warning shows through where only it applies.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Results come back sorted by position, regardless of publish order, and
    // the ranges reflect the "(x: usize)" / "xxx" edits above.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3647
3648#[gpui::test]
3649async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3650 init_test(cx);
3651
3652 let text = concat!(
3653 "let one = ;\n", //
3654 "let two = \n",
3655 "let three = 3;\n",
3656 );
3657
3658 let fs = FakeFs::new(cx.executor());
3659 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3660
3661 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3662 let buffer = project
3663 .update(cx, |project, cx| {
3664 project.open_local_buffer(path!("/dir/a.rs"), cx)
3665 })
3666 .await
3667 .unwrap();
3668
3669 project.update(cx, |project, cx| {
3670 project.lsp_store().update(cx, |lsp_store, cx| {
3671 lsp_store
3672 .update_diagnostic_entries(
3673 LanguageServerId(0),
3674 PathBuf::from(path!("/dir/a.rs")),
3675 None,
3676 None,
3677 vec![
3678 DiagnosticEntry {
3679 range: Unclipped(PointUtf16::new(0, 10))
3680 ..Unclipped(PointUtf16::new(0, 10)),
3681 diagnostic: Diagnostic {
3682 severity: DiagnosticSeverity::ERROR,
3683 message: "syntax error 1".to_string(),
3684 source_kind: DiagnosticSourceKind::Pushed,
3685 ..Diagnostic::default()
3686 },
3687 },
3688 DiagnosticEntry {
3689 range: Unclipped(PointUtf16::new(1, 10))
3690 ..Unclipped(PointUtf16::new(1, 10)),
3691 diagnostic: Diagnostic {
3692 severity: DiagnosticSeverity::ERROR,
3693 message: "syntax error 2".to_string(),
3694 source_kind: DiagnosticSourceKind::Pushed,
3695 ..Diagnostic::default()
3696 },
3697 },
3698 ],
3699 cx,
3700 )
3701 .unwrap();
3702 })
3703 });
3704
3705 // An empty range is extended forward to include the following character.
3706 // At the end of a line, an empty range is extended backward to include
3707 // the preceding character.
3708 buffer.update(cx, |buffer, _| {
3709 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3710 assert_eq!(
3711 chunks
3712 .iter()
3713 .map(|(s, d)| (s.as_str(), *d))
3714 .collect::<Vec<_>>(),
3715 &[
3716 ("let one = ", None),
3717 (";", Some(DiagnosticSeverity::ERROR)),
3718 ("\nlet two =", None),
3719 (" ", Some(DiagnosticSeverity::ERROR)),
3720 ("\nlet three = 3;\n", None)
3721 ]
3722 );
3723 });
3724}
3725
3726#[gpui::test]
3727async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3728 init_test(cx);
3729
3730 let fs = FakeFs::new(cx.executor());
3731 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3732 .await;
3733
3734 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3735 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3736
3737 lsp_store.update(cx, |lsp_store, cx| {
3738 lsp_store
3739 .update_diagnostic_entries(
3740 LanguageServerId(0),
3741 Path::new(path!("/dir/a.rs")).to_owned(),
3742 None,
3743 None,
3744 vec![DiagnosticEntry {
3745 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3746 diagnostic: Diagnostic {
3747 severity: DiagnosticSeverity::ERROR,
3748 is_primary: true,
3749 message: "syntax error a1".to_string(),
3750 source_kind: DiagnosticSourceKind::Pushed,
3751 ..Diagnostic::default()
3752 },
3753 }],
3754 cx,
3755 )
3756 .unwrap();
3757 lsp_store
3758 .update_diagnostic_entries(
3759 LanguageServerId(1),
3760 Path::new(path!("/dir/a.rs")).to_owned(),
3761 None,
3762 None,
3763 vec![DiagnosticEntry {
3764 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3765 diagnostic: Diagnostic {
3766 severity: DiagnosticSeverity::ERROR,
3767 is_primary: true,
3768 message: "syntax error b1".to_string(),
3769 source_kind: DiagnosticSourceKind::Pushed,
3770 ..Diagnostic::default()
3771 },
3772 }],
3773 cx,
3774 )
3775 .unwrap();
3776
3777 assert_eq!(
3778 lsp_store.diagnostic_summary(false, cx),
3779 DiagnosticSummary {
3780 error_count: 2,
3781 warning_count: 0,
3782 }
3783 );
3784 });
3785}
3786
3787#[gpui::test]
3788async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3789 cx: &mut gpui::TestAppContext,
3790) {
3791 init_test(cx);
3792
3793 let fs = FakeFs::new(cx.executor());
3794 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3795 .await;
3796
3797 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3798 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3799
3800 lsp_store.update(cx, |lsp_store, cx| {
3801 lsp_store
3802 .update_diagnostic_entries(
3803 LanguageServerId(0),
3804 Path::new(path!("/dir/a.rs")).to_owned(),
3805 None,
3806 None,
3807 vec![DiagnosticEntry {
3808 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3809 diagnostic: Diagnostic {
3810 severity: DiagnosticSeverity::ERROR,
3811 is_primary: true,
3812 message: "error in a".to_string(),
3813 source_kind: DiagnosticSourceKind::Pushed,
3814 ..Diagnostic::default()
3815 },
3816 }],
3817 cx,
3818 )
3819 .unwrap();
3820 lsp_store
3821 .update_diagnostic_entries(
3822 LanguageServerId(0),
3823 Path::new(path!("/dir/b.rs")).to_owned(),
3824 None,
3825 None,
3826 vec![DiagnosticEntry {
3827 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3828 diagnostic: Diagnostic {
3829 severity: DiagnosticSeverity::WARNING,
3830 is_primary: true,
3831 message: "warning in b".to_string(),
3832 source_kind: DiagnosticSourceKind::Pushed,
3833 ..Diagnostic::default()
3834 },
3835 }],
3836 cx,
3837 )
3838 .unwrap();
3839
3840 assert_eq!(
3841 lsp_store.diagnostic_summary(false, cx),
3842 DiagnosticSummary {
3843 error_count: 1,
3844 warning_count: 1,
3845 }
3846 );
3847 });
3848
3849 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3850 .await
3851 .unwrap();
3852 cx.executor().run_until_parked();
3853
3854 lsp_store.update(cx, |lsp_store, cx| {
3855 assert_eq!(
3856 lsp_store.diagnostic_summary(false, cx),
3857 DiagnosticSummary {
3858 error_count: 0,
3859 warning_count: 1,
3860 },
3861 );
3862 });
3863}
3864
// Restarting a language server must drop the diagnostics it had published
// and emit `DiagnosticsUpdated` so listeners (e.g. UI) can refresh.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error so there is diagnostic state for the restart to clear.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe before restarting so we observe the events the restart emits.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain only the events already queued (`now_or_never` polls without
    // blocking), looking for at least one DiagnosticsUpdated.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // The restarted server has published nothing yet, so the summary is empty.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3938
// When a buffer is reloaded from disk, the editor should re-pull document
// diagnostics from servers that advertise a pull (textDocument/diagnostic)
// provider. The test counts pull requests before and after an on-disk change.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Shared counter incremented on every DocumentDiagnosticRequest the
    // fake server receives.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise a pull-diagnostics provider so the client issues
            // textDocument/diagnostic requests at all.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Respond to every pull with an empty full report, bumping the counter.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the counter; only pulls *caused by the reload* should count.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
4046
// `edits_from_lsp` must interpret edit ranges against the *document version
// the server saw* and transform them through later local edits, so applying
// them to the current buffer still lands in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server knows about *before* the local edits
    // below; the server's edits will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP ranges below use coordinates of the *original* document
    // (no comment lines yet); passing `lsp_document_version` tells
    // `edits_from_lsp` to transform them through the edits made since.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits preserves the locally added comments
    // while carrying out the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4201
// `edits_from_lsp` should collapse a server's sprawling "rewrite most of the
// file" edit set down to the minimal diff, so the buffer only actually
// changes where the text differs.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits boil down to exactly two real changes:
        // merging the import and deleting the now-redundant second `use`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4312
4313#[gpui::test]
4314async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4315 cx: &mut gpui::TestAppContext,
4316) {
4317 init_test(cx);
4318
4319 let text = "Path()";
4320
4321 let fs = FakeFs::new(cx.executor());
4322 fs.insert_tree(
4323 path!("/dir"),
4324 json!({
4325 "a.rs": text
4326 }),
4327 )
4328 .await;
4329
4330 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4331 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4332 let buffer = project
4333 .update(cx, |project, cx| {
4334 project.open_local_buffer(path!("/dir/a.rs"), cx)
4335 })
4336 .await
4337 .unwrap();
4338
4339 // Simulate the language server sending us a pair of edits at the same location,
4340 // with an insertion following a replacement (which violates the LSP spec).
4341 let edits = lsp_store
4342 .update(cx, |lsp_store, cx| {
4343 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4344 &buffer,
4345 [
4346 lsp::TextEdit {
4347 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4348 new_text: "Path".into(),
4349 },
4350 lsp::TextEdit {
4351 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4352 new_text: "from path import Path\n\n\n".into(),
4353 },
4354 ],
4355 LanguageServerId(0),
4356 None,
4357 cx,
4358 )
4359 })
4360 .await
4361 .unwrap();
4362
4363 buffer.update(cx, |buffer, cx| {
4364 buffer.edit(edits, None, cx);
4365 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4366 });
4367}
4368
// `edits_from_lsp` must tolerate malformed server edits: unordered edits,
// inverted ranges (end before start), and ranges past the end of the
// document, normalizing them into a valid, minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99,0) is far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the normalized result is the same
        // two-edit merge-imports diff as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4475
4476fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4477 buffer: &Buffer,
4478 range: Range<T>,
4479) -> Vec<(String, Option<DiagnosticSeverity>)> {
4480 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4481 for chunk in buffer.snapshot().chunks(
4482 range,
4483 LanguageAwareStyling {
4484 tree_sitter: true,
4485 diagnostics: true,
4486 },
4487 ) {
4488 if chunks
4489 .last()
4490 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4491 {
4492 chunks.last_mut().unwrap().0.push_str(chunk.text);
4493 } else {
4494 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4495 }
4496 }
4497 chunks
4498}
4499
// Go-to-definition that resolves to a file outside the opened worktree should
// open the target in an invisible single-file worktree, and that worktree
// should be released once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as a (visible, single-file) worktree; a.rs lives
    // outside of it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server answers the definition request with a location in a.rs,
    // a file outside the project's worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in an *invisible* worktree; b.rs remains the only
        // visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for each of the project's worktrees.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4600
// A completion item's `text_edit` should take precedence over both
// `insert_text` and `label` when computing the new text and replace range.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler is awaited below to ensure the fake server receives it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new_text and range win out over label/insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4684
// When a `CompletionList` supplies a default `edit_range` in `item_defaults`,
// items without their own `text_edit` should use that range, taking their new
// text from `text_edit_text` if present, else falling back to the label.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used together with the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4822
// Without a `text_edit` or a default `edit_range`, the completion's replace
// range must be inferred from the word around the cursor, and the new text
// comes from `insert_text` (falling back to the label).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replace range covers the partial word "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, after the word "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replace range covers "cmp", not the surrounding quotes.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4929
// Carriage returns ("\r" and "\r\n") inside a completion's insert_text should
// be normalized to "\n" in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text contains both a lone "\r" and a "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both CR variants were converted to plain LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4998
// `supports_range_formatting` must consider only the language servers that
// apply to the given buffer: the TypeScript server advertises range
// formatting, the Rust server does not, and neither should affect the other
// buffer's answer.
#[gpui::test]
async fn test_supports_range_formatting_ignores_unrelated_language_servers(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    // Configure formatting to use the current language server, so capability
    // checks are routed to the per-language fake servers below.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.all_languages.defaults.formatter = Some(FormatterList::Single(
                    Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current),
                ));
            });
        });
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
            "b.rs": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    language_registry.add(rust_lang());

    // TypeScript server: supports range formatting.
    let mut typescript_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            name: "typescript-fake-language-server",
            capabilities: lsp::ServerCapabilities {
                document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Rust server: supports whole-document formatting only.
    let mut rust_language_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-fake-language-server",
            capabilities: lsp::ServerCapabilities {
                document_formatting_provider: Some(lsp::OneOf::Left(true)),
                document_range_formatting_provider: Some(lsp::OneOf::Left(false)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (typescript_buffer, _typescript_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    let (rust_buffer, _rust_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let _typescript_language_server = typescript_language_servers.next().await.unwrap();
    let _rust_language_server = rust_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    assert!(project.read_with(cx, |project, cx| {
        project.supports_range_formatting(&typescript_buffer, cx)
    }));
    assert!(!project.read_with(cx, |project, cx| {
        project.supports_range_formatting(&rust_buffer, cx)
    }));
}
5077
// A code action with no edits but a command should be applied by executing the
// command; edits the server pushes via `workspace/applyEdit` during command
// execution must end up in the returned project transaction (and be undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5221
// Renaming a file to a path in a not-yet-existing directory should create the
// whole directory hierarchy, remove the old entry, and preserve the file's
// contents; renaming again into an existing directory must also work.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First rename: target directory chain dir1/dir2/dir3 does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id, since the rename produced a new entry.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up into dir1/dir2, which already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
5329
5330#[gpui::test(iterations = 10)]
5331async fn test_save_file(cx: &mut gpui::TestAppContext) {
5332 init_test(cx);
5333
5334 let fs = FakeFs::new(cx.executor());
5335 fs.insert_tree(
5336 path!("/dir"),
5337 json!({
5338 "file1": "the old contents",
5339 }),
5340 )
5341 .await;
5342
5343 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5344 let buffer = project
5345 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5346 .await
5347 .unwrap();
5348 buffer.update(cx, |buffer, cx| {
5349 assert_eq!(buffer.text(), "the old contents");
5350 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5351 });
5352
5353 project
5354 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5355 .await
5356 .unwrap();
5357
5358 let new_text = fs
5359 .load(Path::new(path!("/dir/file1")))
5360 .await
5361 .unwrap()
5362 .replace("\r\n", "\n");
5363 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5364}
5365
// Regression test: saving an untitled buffer as a Rust file should spawn the
// Rust language server and notify it of the newly-saved document.
// Issue: #24349
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; it has no language yet, so no server should
    // be associated with it.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a .rs file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5445
5446#[gpui::test(iterations = 30)]
5447async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5448 init_test(cx);
5449
5450 let fs = FakeFs::new(cx.executor());
5451 fs.insert_tree(
5452 path!("/dir"),
5453 json!({
5454 "file1": "the original contents",
5455 }),
5456 )
5457 .await;
5458
5459 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5460 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5461 let buffer = project
5462 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5463 .await
5464 .unwrap();
5465
5466 // Change the buffer's file on disk, and then wait for the file change
5467 // to be detected by the worktree, so that the buffer starts reloading.
5468 fs.save(
5469 path!("/dir/file1").as_ref(),
5470 &"the first contents".into(),
5471 Default::default(),
5472 )
5473 .await
5474 .unwrap();
5475 worktree.next_event(cx).await;
5476
5477 // Change the buffer's file again. Depending on the random seed, the
5478 // previous file change may still be in progress.
5479 fs.save(
5480 path!("/dir/file1").as_ref(),
5481 &"the second contents".into(),
5482 Default::default(),
5483 )
5484 .await
5485 .unwrap();
5486 worktree.next_event(cx).await;
5487
5488 cx.executor().run_until_parked();
5489 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5490 buffer.read_with(cx, |buffer, _| {
5491 assert_eq!(buffer.text(), on_disk_text);
5492 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5493 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5494 });
5495}
5496
5497#[gpui::test(iterations = 30)]
5498async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5499 init_test(cx);
5500
5501 let fs = FakeFs::new(cx.executor());
5502 fs.insert_tree(
5503 path!("/dir"),
5504 json!({
5505 "file1": "the original contents",
5506 }),
5507 )
5508 .await;
5509
5510 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5511 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5512 let buffer = project
5513 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5514 .await
5515 .unwrap();
5516
5517 // Change the buffer's file on disk, and then wait for the file change
5518 // to be detected by the worktree, so that the buffer starts reloading.
5519 fs.save(
5520 path!("/dir/file1").as_ref(),
5521 &"the first contents".into(),
5522 Default::default(),
5523 )
5524 .await
5525 .unwrap();
5526 worktree.next_event(cx).await;
5527
5528 cx.executor()
5529 .spawn(cx.executor().simulate_random_delay())
5530 .await;
5531
5532 // Perform a noop edit, causing the buffer's version to increase.
5533 buffer.update(cx, |buffer, cx| {
5534 buffer.edit([(0..0, " ")], None, cx);
5535 buffer.undo(cx);
5536 });
5537
5538 cx.executor().run_until_parked();
5539 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5540 buffer.read_with(cx, |buffer, _| {
5541 let buffer_text = buffer.text();
5542 if buffer_text == on_disk_text {
5543 assert!(
5544 !buffer.is_dirty() && !buffer.has_conflict(),
5545 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5546 );
5547 }
5548 // If the file change occurred while the buffer was processing the first
5549 // change, the buffer will be in a conflicting state.
5550 else {
5551 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5552 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5553 }
5554 });
5555}
5556
5557#[gpui::test]
5558async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5559 init_test(cx);
5560
5561 let fs = FakeFs::new(cx.executor());
5562 fs.insert_tree(
5563 path!("/dir"),
5564 json!({
5565 "file1": "the old contents",
5566 }),
5567 )
5568 .await;
5569
5570 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5571 let buffer = project
5572 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5573 .await
5574 .unwrap();
5575 buffer.update(cx, |buffer, cx| {
5576 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5577 });
5578
5579 project
5580 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5581 .await
5582 .unwrap();
5583
5584 let new_text = fs
5585 .load(Path::new(path!("/dir/file1")))
5586 .await
5587 .unwrap()
5588 .replace("\r\n", "\n");
5589 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5590}
5591
5592#[gpui::test]
5593async fn test_save_as(cx: &mut gpui::TestAppContext) {
5594 init_test(cx);
5595
5596 let fs = FakeFs::new(cx.executor());
5597 fs.insert_tree("/dir", json!({})).await;
5598
5599 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5600
5601 let languages = project.update(cx, |project, _| project.languages().clone());
5602 languages.add(rust_lang());
5603
5604 let buffer = project.update(cx, |project, cx| {
5605 project.create_local_buffer("", None, false, cx)
5606 });
5607 buffer.update(cx, |buffer, cx| {
5608 buffer.edit([(0..0, "abc")], None, cx);
5609 assert!(buffer.is_dirty());
5610 assert!(!buffer.has_conflict());
5611 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5612 });
5613 project
5614 .update(cx, |project, cx| {
5615 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5616 let path = ProjectPath {
5617 worktree_id,
5618 path: rel_path("file1.rs").into(),
5619 };
5620 project.save_buffer_as(buffer.clone(), path, cx)
5621 })
5622 .await
5623 .unwrap();
5624 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5625
5626 cx.executor().run_until_parked();
5627 buffer.update(cx, |buffer, cx| {
5628 assert_eq!(
5629 buffer.file().unwrap().full_path(cx),
5630 Path::new("dir/file1.rs")
5631 );
5632 assert!(!buffer.is_dirty());
5633 assert!(!buffer.has_conflict());
5634 assert_eq!(buffer.language().unwrap().name(), "Rust");
5635 });
5636
5637 let opened_buffer = project
5638 .update(cx, |project, cx| {
5639 project.open_local_buffer("/dir/file1.rs", cx)
5640 })
5641 .await
5642 .unwrap();
5643 assert_eq!(opened_buffer, buffer);
5644}
5645
5646#[gpui::test]
5647async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5648 init_test(cx);
5649
5650 let fs = FakeFs::new(cx.executor());
5651 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5652
5653 fs.insert_tree(
5654 path!("/dir"),
5655 json!({
5656 "data_a.txt": "data about a"
5657 }),
5658 )
5659 .await;
5660
5661 let buffer = project
5662 .update(cx, |project, cx| {
5663 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5664 })
5665 .await
5666 .unwrap();
5667
5668 buffer.update(cx, |buffer, cx| {
5669 buffer.edit([(11..12, "b")], None, cx);
5670 });
5671
5672 // Save buffer's contents as a new file and confirm that the buffer's now
5673 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5674 // file associated with the buffer has now been updated to `data_b.txt`
5675 project
5676 .update(cx, |project, cx| {
5677 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5678 let new_path = ProjectPath {
5679 worktree_id,
5680 path: rel_path("data_b.txt").into(),
5681 };
5682
5683 project.save_buffer_as(buffer.clone(), new_path, cx)
5684 })
5685 .await
5686 .unwrap();
5687
5688 buffer.update(cx, |buffer, cx| {
5689 assert_eq!(
5690 buffer.file().unwrap().full_path(cx),
5691 Path::new("dir/data_b.txt")
5692 )
5693 });
5694
5695 // Open the original `data_a.txt` file, confirming that its contents are
5696 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5697 let original_buffer = project
5698 .update(cx, |project, cx| {
5699 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5700 })
5701 .await
5702 .unwrap();
5703
5704 original_buffer.update(cx, |buffer, cx| {
5705 assert_eq!(buffer.text(), "data about a");
5706 assert_eq!(
5707 buffer.file().unwrap().full_path(cx),
5708 Path::new("dir/data_a.txt")
5709 )
5710 });
5711}
5712
// End-to-end check that local worktree rescans (after real fs renames and
// deletes) keep entry ids and open buffers consistent, and that the same
// changes can be replayed onto a remote worktree via observed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // onto the remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree reflects the post-rename layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5880
5881#[cfg(target_os = "linux")]
5882#[gpui::test(retries = 5)]
5883async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5884 init_test(cx);
5885 cx.executor().allow_parking();
5886
5887 let dir = TempTree::new(json!({}));
5888 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5889 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5890
5891 tree.flush_fs_events(cx).await;
5892
5893 let repro_dir = dir.path().join("repro");
5894 std::fs::create_dir(&repro_dir).unwrap();
5895 tree.flush_fs_events(cx).await;
5896
5897 cx.update(|cx| {
5898 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5899 });
5900
5901 std::fs::remove_dir_all(&repro_dir).unwrap();
5902 tree.flush_fs_events(cx).await;
5903
5904 cx.update(|cx| {
5905 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5906 });
5907
5908 std::fs::create_dir(&repro_dir).unwrap();
5909 tree.flush_fs_events(cx).await;
5910
5911 cx.update(|cx| {
5912 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5913 });
5914
5915 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5916 tree.flush_fs_events(cx).await;
5917
5918 cx.update(|cx| {
5919 assert!(
5920 tree.read(cx)
5921 .entry_for_path(rel_path("repro/repro-marker"))
5922 .is_some()
5923 );
5924 });
5925}
5926
5927#[gpui::test(iterations = 10)]
5928async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5929 init_test(cx);
5930
5931 let fs = FakeFs::new(cx.executor());
5932 fs.insert_tree(
5933 path!("/dir"),
5934 json!({
5935 "a": {
5936 "file1": "",
5937 }
5938 }),
5939 )
5940 .await;
5941
5942 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5943 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5944 let tree_id = tree.update(cx, |tree, _| tree.id());
5945
5946 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5947 project.update(cx, |project, cx| {
5948 let tree = project.worktrees(cx).next().unwrap();
5949 tree.read(cx)
5950 .entry_for_path(rel_path(path))
5951 .unwrap_or_else(|| panic!("no entry for path {}", path))
5952 .id
5953 })
5954 };
5955
5956 let dir_id = id_for_path("a", cx);
5957 let file_id = id_for_path("a/file1", cx);
5958 let buffer = project
5959 .update(cx, |p, cx| {
5960 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5961 })
5962 .await
5963 .unwrap();
5964 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5965
5966 project
5967 .update(cx, |project, cx| {
5968 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5969 })
5970 .unwrap()
5971 .await
5972 .into_included()
5973 .unwrap();
5974 cx.executor().run_until_parked();
5975
5976 assert_eq!(id_for_path("b", cx), dir_id);
5977 assert_eq!(id_for_path("b/file1", cx), file_id);
5978 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5979}
5980
5981#[gpui::test]
5982async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5983 init_test(cx);
5984
5985 let fs = FakeFs::new(cx.executor());
5986 fs.insert_tree(
5987 "/dir",
5988 json!({
5989 "a.txt": "a-contents",
5990 "b.txt": "b-contents",
5991 }),
5992 )
5993 .await;
5994
5995 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5996
5997 // Spawn multiple tasks to open paths, repeating some paths.
5998 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5999 (
6000 p.open_local_buffer("/dir/a.txt", cx),
6001 p.open_local_buffer("/dir/b.txt", cx),
6002 p.open_local_buffer("/dir/a.txt", cx),
6003 )
6004 });
6005
6006 let buffer_a_1 = buffer_a_1.await.unwrap();
6007 let buffer_a_2 = buffer_a_2.await.unwrap();
6008 let buffer_b = buffer_b.await.unwrap();
6009 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
6010 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
6011
6012 // There is only one buffer per path.
6013 let buffer_a_id = buffer_a_1.entity_id();
6014 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
6015
6016 // Open the same path again while it is still open.
6017 drop(buffer_a_1);
6018 let buffer_a_3 = project
6019 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6020 .await
6021 .unwrap();
6022
6023 // There's still only one buffer per path.
6024 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
6025}
6026
6027#[gpui::test]
6028async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6029 init_test(cx);
6030
6031 let fs = FakeFs::new(cx.executor());
6032 fs.insert_tree(
6033 path!("/dir"),
6034 json!({
6035 "file1": "abc",
6036 "file2": "def",
6037 "file3": "ghi",
6038 }),
6039 )
6040 .await;
6041
6042 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6043
6044 let buffer1 = project
6045 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6046 .await
6047 .unwrap();
6048 let events = Arc::new(Mutex::new(Vec::new()));
6049
6050 // initially, the buffer isn't dirty.
6051 buffer1.update(cx, |buffer, cx| {
6052 cx.subscribe(&buffer1, {
6053 let events = events.clone();
6054 move |_, _, event, _| match event {
6055 BufferEvent::Operation { .. } => {}
6056 _ => events.lock().push(event.clone()),
6057 }
6058 })
6059 .detach();
6060
6061 assert!(!buffer.is_dirty());
6062 assert!(events.lock().is_empty());
6063
6064 buffer.edit([(1..2, "")], None, cx);
6065 });
6066
6067 // after the first edit, the buffer is dirty, and emits a dirtied event.
6068 buffer1.update(cx, |buffer, cx| {
6069 assert!(buffer.text() == "ac");
6070 assert!(buffer.is_dirty());
6071 assert_eq!(
6072 *events.lock(),
6073 &[
6074 language::BufferEvent::Edited { is_local: true },
6075 language::BufferEvent::DirtyChanged
6076 ]
6077 );
6078 events.lock().clear();
6079 buffer.did_save(
6080 buffer.version(),
6081 buffer.file().unwrap().disk_state().mtime(),
6082 cx,
6083 );
6084 });
6085
6086 // after saving, the buffer is not dirty, and emits a saved event.
6087 buffer1.update(cx, |buffer, cx| {
6088 assert!(!buffer.is_dirty());
6089 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
6090 events.lock().clear();
6091
6092 buffer.edit([(1..1, "B")], None, cx);
6093 buffer.edit([(2..2, "D")], None, cx);
6094 });
6095
6096 // after editing again, the buffer is dirty, and emits another dirty event.
6097 buffer1.update(cx, |buffer, cx| {
6098 assert!(buffer.text() == "aBDc");
6099 assert!(buffer.is_dirty());
6100 assert_eq!(
6101 *events.lock(),
6102 &[
6103 language::BufferEvent::Edited { is_local: true },
6104 language::BufferEvent::DirtyChanged,
6105 language::BufferEvent::Edited { is_local: true },
6106 ],
6107 );
6108 events.lock().clear();
6109
6110 // After restoring the buffer to its previously-saved state,
6111 // the buffer is not considered dirty anymore.
6112 buffer.edit([(1..3, "")], None, cx);
6113 assert!(buffer.text() == "ac");
6114 assert!(!buffer.is_dirty());
6115 });
6116
6117 assert_eq!(
6118 *events.lock(),
6119 &[
6120 language::BufferEvent::Edited { is_local: true },
6121 language::BufferEvent::DirtyChanged
6122 ]
6123 );
6124
6125 // When a file is deleted, it is not considered dirty.
6126 let events = Arc::new(Mutex::new(Vec::new()));
6127 let buffer2 = project
6128 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6129 .await
6130 .unwrap();
6131 buffer2.update(cx, |_, cx| {
6132 cx.subscribe(&buffer2, {
6133 let events = events.clone();
6134 move |_, _, event, _| match event {
6135 BufferEvent::Operation { .. } => {}
6136 _ => events.lock().push(event.clone()),
6137 }
6138 })
6139 .detach();
6140 });
6141
6142 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
6143 .await
6144 .unwrap();
6145 cx.executor().run_until_parked();
6146 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
6147 assert_eq!(
6148 mem::take(&mut *events.lock()),
6149 &[language::BufferEvent::FileHandleChanged]
6150 );
6151
6152 // Buffer becomes dirty when edited.
6153 buffer2.update(cx, |buffer, cx| {
6154 buffer.edit([(2..3, "")], None, cx);
6155 assert_eq!(buffer.is_dirty(), true);
6156 });
6157 assert_eq!(
6158 mem::take(&mut *events.lock()),
6159 &[
6160 language::BufferEvent::Edited { is_local: true },
6161 language::BufferEvent::DirtyChanged
6162 ]
6163 );
6164
6165 // Buffer becomes clean again when all of its content is removed, because
6166 // the file was deleted.
6167 buffer2.update(cx, |buffer, cx| {
6168 buffer.edit([(0..2, "")], None, cx);
6169 assert_eq!(buffer.is_empty(), true);
6170 assert_eq!(buffer.is_dirty(), false);
6171 });
6172 assert_eq!(
6173 *events.lock(),
6174 &[
6175 language::BufferEvent::Edited { is_local: true },
6176 language::BufferEvent::DirtyChanged
6177 ]
6178 );
6179
6180 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6181 let events = Arc::new(Mutex::new(Vec::new()));
6182 let buffer3 = project
6183 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6184 .await
6185 .unwrap();
6186 buffer3.update(cx, |_, cx| {
6187 cx.subscribe(&buffer3, {
6188 let events = events.clone();
6189 move |_, _, event, _| match event {
6190 BufferEvent::Operation { .. } => {}
6191 _ => events.lock().push(event.clone()),
6192 }
6193 })
6194 .detach();
6195 });
6196
6197 buffer3.update(cx, |buffer, cx| {
6198 buffer.edit([(0..0, "x")], None, cx);
6199 });
6200 events.lock().clear();
6201 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6202 .await
6203 .unwrap();
6204 cx.executor().run_until_parked();
6205 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6206 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6207}
6208
// A buffer that has a conflict (disk changed while dirty) must reload from
// disk as soon as an undo returns it to a clean state.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    // Freshly opened: matches disk, clean.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text and reports a conflict.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
6277
// When a clean buffer's file changes on disk, the buffer reloads by diffing
// old vs. new contents, preserving anchors across the edit. When a dirty
// buffer's file changes, the buffer keeps its text and becomes conflicted.
// The `ˇ` markers in the text denote the offsets where anchors are placed.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset so we can verify they survive
    // the reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff-applied edits to the new offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6360
6361#[gpui::test]
6362async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6363 init_test(cx);
6364
6365 let fs = FakeFs::new(cx.executor());
6366 fs.insert_tree(
6367 path!("/dir"),
6368 json!({
6369 "file1": "a\nb\nc\n",
6370 "file2": "one\r\ntwo\r\nthree\r\n",
6371 }),
6372 )
6373 .await;
6374
6375 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6376 let buffer1 = project
6377 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6378 .await
6379 .unwrap();
6380 let buffer2 = project
6381 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6382 .await
6383 .unwrap();
6384
6385 buffer1.update(cx, |buffer, _| {
6386 assert_eq!(buffer.text(), "a\nb\nc\n");
6387 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6388 });
6389 buffer2.update(cx, |buffer, _| {
6390 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6391 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6392 });
6393
6394 // Change a file's line endings on disk from unix to windows. The buffer's
6395 // state updates correctly.
6396 fs.save(
6397 path!("/dir/file1").as_ref(),
6398 &"aaa\nb\nc\n".into(),
6399 LineEnding::Windows,
6400 )
6401 .await
6402 .unwrap();
6403 cx.executor().run_until_parked();
6404 buffer1.update(cx, |buffer, _| {
6405 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6406 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6407 });
6408
6409 // Save a file with windows line endings. The file is written correctly.
6410 buffer2.update(cx, |buffer, cx| {
6411 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6412 });
6413 project
6414 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6415 .await
6416 .unwrap();
6417 assert_eq!(
6418 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6419 "one\r\ntwo\r\nthree\r\nfour\r\n",
6420 );
6421}
6422
// Pushed LSP diagnostics whose `related_information` cross-references each
// other are merged into groups: each group has one primary entry plus its
// hints, all sharing a `group_id`. This test publishes two such clusters
// ("error 1" with one hint, "error 2" with two hints) and verifies both the
// flattened range-ordered view and per-group lookup.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics: two primaries ("error 1", "error 2") and three HINT
    // entries that mirror the primaries' related_information, each linking
    // back to its primary via its own related_information.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Flattened view: entries ordered by range; each entry carries its
    // group_id (0 for the "error 2" cluster, 1 for "error 1") and whether it
    // is the group's primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: both hints plus the primary "error 2" entry.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the primary "error 1" entry plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6682
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry should drive the LSP file-operation flow:
    // a `workspace/willRenameFiles` request (whose returned WorkspaceEdit is
    // recorded and applied) followed by a `workspace/didRenameFiles`
    // notification, for a server that registered matching file-operation
    // filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the server registers for rename file operations: every .rs
    // file, plus every folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer with LSP support starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename. The resulting task is awaited only after the
    // willRenameFiles handler below has been installed, so the server can
    // answer the in-flight request.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; it targets a
    // file other than the one being renamed.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed back by the willRenameFiles handler so we can
    // assert afterwards that the request was actually served.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // carrying the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6819
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` surfaces the range the
    // server reports for the symbol under the cursor, and `perform_rename`
    // applies the server's WorkspaceEdit across every affected buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // prepare_rename at offset 7 (inside "ONE"). The handler is installed
    // after the request task is created and answers with the range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Rename ONE -> THREE. The fake server responds with edits in both
    // one.rs (the definition) and two.rs (the two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The rename transaction maps each edited buffer to its undo entry;
    // both buffers should now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6960
6961#[gpui::test]
6962async fn test_search(cx: &mut gpui::TestAppContext) {
6963 init_test(cx);
6964
6965 let fs = FakeFs::new(cx.executor());
6966 fs.insert_tree(
6967 path!("/dir"),
6968 json!({
6969 "one.rs": "const ONE: usize = 1;",
6970 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6971 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6972 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6973 }),
6974 )
6975 .await;
6976 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6977 assert_eq!(
6978 search(
6979 &project,
6980 SearchQuery::text(
6981 "TWO",
6982 false,
6983 true,
6984 false,
6985 Default::default(),
6986 Default::default(),
6987 false,
6988 None
6989 )
6990 .unwrap(),
6991 cx
6992 )
6993 .await
6994 .unwrap(),
6995 HashMap::from_iter([
6996 (path!("dir/two.rs").to_string(), vec![6..9]),
6997 (path!("dir/three.rs").to_string(), vec![37..40])
6998 ])
6999 );
7000
7001 let buffer_4 = project
7002 .update(cx, |project, cx| {
7003 project.open_local_buffer(path!("/dir/four.rs"), cx)
7004 })
7005 .await
7006 .unwrap();
7007 buffer_4.update(cx, |buffer, cx| {
7008 let text = "two::TWO";
7009 buffer.edit([(20..28, text), (31..43, text)], None, cx);
7010 });
7011
7012 assert_eq!(
7013 search(
7014 &project,
7015 SearchQuery::text(
7016 "TWO",
7017 false,
7018 true,
7019 false,
7020 Default::default(),
7021 Default::default(),
7022 false,
7023 None,
7024 )
7025 .unwrap(),
7026 cx
7027 )
7028 .await
7029 .unwrap(),
7030 HashMap::from_iter([
7031 (path!("dir/two.rs").to_string(), vec![6..9]),
7032 (path!("dir/three.rs").to_string(), vec![37..40]),
7033 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
7034 ])
7035 );
7036}
7037
7038#[gpui::test]
7039async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
7040 init_test(cx);
7041
7042 let search_query = "file";
7043
7044 let fs = FakeFs::new(cx.executor());
7045 fs.insert_tree(
7046 path!("/dir"),
7047 json!({
7048 "one.rs": r#"// Rust file one"#,
7049 "one.ts": r#"// TypeScript file one"#,
7050 "two.rs": r#"// Rust file two"#,
7051 "two.ts": r#"// TypeScript file two"#,
7052 }),
7053 )
7054 .await;
7055 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7056
7057 assert!(
7058 search(
7059 &project,
7060 SearchQuery::text(
7061 search_query,
7062 false,
7063 true,
7064 false,
7065 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7066 Default::default(),
7067 false,
7068 None
7069 )
7070 .unwrap(),
7071 cx
7072 )
7073 .await
7074 .unwrap()
7075 .is_empty(),
7076 "If no inclusions match, no files should be returned"
7077 );
7078
7079 assert_eq!(
7080 search(
7081 &project,
7082 SearchQuery::text(
7083 search_query,
7084 false,
7085 true,
7086 false,
7087 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7088 Default::default(),
7089 false,
7090 None
7091 )
7092 .unwrap(),
7093 cx
7094 )
7095 .await
7096 .unwrap(),
7097 HashMap::from_iter([
7098 (path!("dir/one.rs").to_string(), vec![8..12]),
7099 (path!("dir/two.rs").to_string(), vec![8..12]),
7100 ]),
7101 "Rust only search should give only Rust files"
7102 );
7103
7104 assert_eq!(
7105 search(
7106 &project,
7107 SearchQuery::text(
7108 search_query,
7109 false,
7110 true,
7111 false,
7112 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7113 .unwrap(),
7114 Default::default(),
7115 false,
7116 None,
7117 )
7118 .unwrap(),
7119 cx
7120 )
7121 .await
7122 .unwrap(),
7123 HashMap::from_iter([
7124 (path!("dir/one.ts").to_string(), vec![14..18]),
7125 (path!("dir/two.ts").to_string(), vec![14..18]),
7126 ]),
7127 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
7128 );
7129
7130 assert_eq!(
7131 search(
7132 &project,
7133 SearchQuery::text(
7134 search_query,
7135 false,
7136 true,
7137 false,
7138 PathMatcher::new(
7139 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7140 PathStyle::local()
7141 )
7142 .unwrap(),
7143 Default::default(),
7144 false,
7145 None,
7146 )
7147 .unwrap(),
7148 cx
7149 )
7150 .await
7151 .unwrap(),
7152 HashMap::from_iter([
7153 (path!("dir/two.ts").to_string(), vec![14..18]),
7154 (path!("dir/one.rs").to_string(), vec![8..12]),
7155 (path!("dir/one.ts").to_string(), vec![14..18]),
7156 (path!("dir/two.rs").to_string(), vec![8..12]),
7157 ]),
7158 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
7159 );
7160}
7161
7162#[gpui::test]
7163async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
7164 init_test(cx);
7165
7166 let search_query = "file";
7167
7168 let fs = FakeFs::new(cx.executor());
7169 fs.insert_tree(
7170 path!("/dir"),
7171 json!({
7172 "one.rs": r#"// Rust file one"#,
7173 "one.ts": r#"// TypeScript file one"#,
7174 "two.rs": r#"// Rust file two"#,
7175 "two.ts": r#"// TypeScript file two"#,
7176 }),
7177 )
7178 .await;
7179 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7180
7181 assert_eq!(
7182 search(
7183 &project,
7184 SearchQuery::text(
7185 search_query,
7186 false,
7187 true,
7188 false,
7189 Default::default(),
7190 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7191 false,
7192 None,
7193 )
7194 .unwrap(),
7195 cx
7196 )
7197 .await
7198 .unwrap(),
7199 HashMap::from_iter([
7200 (path!("dir/one.rs").to_string(), vec![8..12]),
7201 (path!("dir/one.ts").to_string(), vec![14..18]),
7202 (path!("dir/two.rs").to_string(), vec![8..12]),
7203 (path!("dir/two.ts").to_string(), vec![14..18]),
7204 ]),
7205 "If no exclusions match, all files should be returned"
7206 );
7207
7208 assert_eq!(
7209 search(
7210 &project,
7211 SearchQuery::text(
7212 search_query,
7213 false,
7214 true,
7215 false,
7216 Default::default(),
7217 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7218 false,
7219 None,
7220 )
7221 .unwrap(),
7222 cx
7223 )
7224 .await
7225 .unwrap(),
7226 HashMap::from_iter([
7227 (path!("dir/one.ts").to_string(), vec![14..18]),
7228 (path!("dir/two.ts").to_string(), vec![14..18]),
7229 ]),
7230 "Rust exclusion search should give only TypeScript files"
7231 );
7232
7233 assert_eq!(
7234 search(
7235 &project,
7236 SearchQuery::text(
7237 search_query,
7238 false,
7239 true,
7240 false,
7241 Default::default(),
7242 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7243 .unwrap(),
7244 false,
7245 None,
7246 )
7247 .unwrap(),
7248 cx
7249 )
7250 .await
7251 .unwrap(),
7252 HashMap::from_iter([
7253 (path!("dir/one.rs").to_string(), vec![8..12]),
7254 (path!("dir/two.rs").to_string(), vec![8..12]),
7255 ]),
7256 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7257 );
7258
7259 assert!(
7260 search(
7261 &project,
7262 SearchQuery::text(
7263 search_query,
7264 false,
7265 true,
7266 false,
7267 Default::default(),
7268 PathMatcher::new(
7269 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7270 PathStyle::local(),
7271 )
7272 .unwrap(),
7273 false,
7274 None,
7275 )
7276 .unwrap(),
7277 cx
7278 )
7279 .await
7280 .unwrap()
7281 .is_empty(),
7282 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7283 );
7284}
7285
7286#[gpui::test]
7287async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7288 init_test(cx);
7289
7290 let search_query = "file";
7291
7292 let fs = FakeFs::new(cx.executor());
7293 fs.insert_tree(
7294 path!("/dir"),
7295 json!({
7296 "one.rs": r#"// Rust file one"#,
7297 "one.ts": r#"// TypeScript file one"#,
7298 "two.rs": r#"// Rust file two"#,
7299 "two.ts": r#"// TypeScript file two"#,
7300 }),
7301 )
7302 .await;
7303
7304 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7305 let path_style = PathStyle::local();
7306 let _buffer = project.update(cx, |project, cx| {
7307 project.create_local_buffer("file", None, false, cx)
7308 });
7309
7310 assert_eq!(
7311 search(
7312 &project,
7313 SearchQuery::text(
7314 search_query,
7315 false,
7316 true,
7317 false,
7318 Default::default(),
7319 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7320 false,
7321 None,
7322 )
7323 .unwrap(),
7324 cx
7325 )
7326 .await
7327 .unwrap(),
7328 HashMap::from_iter([
7329 (path!("dir/one.rs").to_string(), vec![8..12]),
7330 (path!("dir/one.ts").to_string(), vec![14..18]),
7331 (path!("dir/two.rs").to_string(), vec![8..12]),
7332 (path!("dir/two.ts").to_string(), vec![14..18]),
7333 ]),
7334 "If no exclusions match, all files should be returned"
7335 );
7336
7337 assert_eq!(
7338 search(
7339 &project,
7340 SearchQuery::text(
7341 search_query,
7342 false,
7343 true,
7344 false,
7345 Default::default(),
7346 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7347 false,
7348 None,
7349 )
7350 .unwrap(),
7351 cx
7352 )
7353 .await
7354 .unwrap(),
7355 HashMap::from_iter([
7356 (path!("dir/one.ts").to_string(), vec![14..18]),
7357 (path!("dir/two.ts").to_string(), vec![14..18]),
7358 ]),
7359 "Rust exclusion search should give only TypeScript files"
7360 );
7361
7362 assert_eq!(
7363 search(
7364 &project,
7365 SearchQuery::text(
7366 search_query,
7367 false,
7368 true,
7369 false,
7370 Default::default(),
7371 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7372 false,
7373 None,
7374 )
7375 .unwrap(),
7376 cx
7377 )
7378 .await
7379 .unwrap(),
7380 HashMap::from_iter([
7381 (path!("dir/one.rs").to_string(), vec![8..12]),
7382 (path!("dir/two.rs").to_string(), vec![8..12]),
7383 ]),
7384 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7385 );
7386
7387 assert!(
7388 search(
7389 &project,
7390 SearchQuery::text(
7391 search_query,
7392 false,
7393 true,
7394 false,
7395 Default::default(),
7396 PathMatcher::new(
7397 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7398 PathStyle::local(),
7399 )
7400 .unwrap(),
7401 false,
7402 None,
7403 )
7404 .unwrap(),
7405 cx
7406 )
7407 .await
7408 .unwrap()
7409 .is_empty(),
7410 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7411 );
7412}
7413
7414#[gpui::test]
7415async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7416 init_test(cx);
7417
7418 let search_query = "file";
7419
7420 let fs = FakeFs::new(cx.executor());
7421 fs.insert_tree(
7422 path!("/dir"),
7423 json!({
7424 "one.rs": r#"// Rust file one"#,
7425 "one.ts": r#"// TypeScript file one"#,
7426 "two.rs": r#"// Rust file two"#,
7427 "two.ts": r#"// TypeScript file two"#,
7428 }),
7429 )
7430 .await;
7431 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7432 assert!(
7433 search(
7434 &project,
7435 SearchQuery::text(
7436 search_query,
7437 false,
7438 true,
7439 false,
7440 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7441 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7442 false,
7443 None,
7444 )
7445 .unwrap(),
7446 cx
7447 )
7448 .await
7449 .unwrap()
7450 .is_empty(),
7451 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7452 );
7453
7454 assert!(
7455 search(
7456 &project,
7457 SearchQuery::text(
7458 search_query,
7459 false,
7460 true,
7461 false,
7462 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7463 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7464 false,
7465 None,
7466 )
7467 .unwrap(),
7468 cx
7469 )
7470 .await
7471 .unwrap()
7472 .is_empty(),
7473 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7474 );
7475
7476 assert!(
7477 search(
7478 &project,
7479 SearchQuery::text(
7480 search_query,
7481 false,
7482 true,
7483 false,
7484 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7485 .unwrap(),
7486 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7487 .unwrap(),
7488 false,
7489 None,
7490 )
7491 .unwrap(),
7492 cx
7493 )
7494 .await
7495 .unwrap()
7496 .is_empty(),
7497 "Non-matching inclusions and exclusions should not change that."
7498 );
7499
7500 assert_eq!(
7501 search(
7502 &project,
7503 SearchQuery::text(
7504 search_query,
7505 false,
7506 true,
7507 false,
7508 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7509 .unwrap(),
7510 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7511 .unwrap(),
7512 false,
7513 None,
7514 )
7515 .unwrap(),
7516 cx
7517 )
7518 .await
7519 .unwrap(),
7520 HashMap::from_iter([
7521 (path!("dir/one.ts").to_string(), vec![14..18]),
7522 (path!("dir/two.ts").to_string(), vec![14..18]),
7523 ]),
7524 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7525 );
7526}
7527
7528#[gpui::test]
7529async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7530 init_test(cx);
7531
7532 let fs = FakeFs::new(cx.executor());
7533 fs.insert_tree(
7534 path!("/worktree-a"),
7535 json!({
7536 "haystack.rs": r#"// NEEDLE"#,
7537 "haystack.ts": r#"// NEEDLE"#,
7538 }),
7539 )
7540 .await;
7541 fs.insert_tree(
7542 path!("/worktree-b"),
7543 json!({
7544 "haystack.rs": r#"// NEEDLE"#,
7545 "haystack.ts": r#"// NEEDLE"#,
7546 }),
7547 )
7548 .await;
7549
7550 let path_style = PathStyle::local();
7551 let project = Project::test(
7552 fs.clone(),
7553 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7554 cx,
7555 )
7556 .await;
7557
7558 assert_eq!(
7559 search(
7560 &project,
7561 SearchQuery::text(
7562 "NEEDLE",
7563 false,
7564 true,
7565 false,
7566 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7567 Default::default(),
7568 true,
7569 None,
7570 )
7571 .unwrap(),
7572 cx
7573 )
7574 .await
7575 .unwrap(),
7576 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7577 "should only return results from included worktree"
7578 );
7579 assert_eq!(
7580 search(
7581 &project,
7582 SearchQuery::text(
7583 "NEEDLE",
7584 false,
7585 true,
7586 false,
7587 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7588 Default::default(),
7589 true,
7590 None,
7591 )
7592 .unwrap(),
7593 cx
7594 )
7595 .await
7596 .unwrap(),
7597 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7598 "should only return results from included worktree"
7599 );
7600
7601 assert_eq!(
7602 search(
7603 &project,
7604 SearchQuery::text(
7605 "NEEDLE",
7606 false,
7607 true,
7608 false,
7609 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7610 Default::default(),
7611 false,
7612 None,
7613 )
7614 .unwrap(),
7615 cx
7616 )
7617 .await
7618 .unwrap(),
7619 HashMap::from_iter([
7620 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7621 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7622 ]),
7623 "should return results from both worktrees"
7624 );
7625}
7626
7627#[gpui::test]
7628async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7629 init_test(cx);
7630
7631 let fs = FakeFs::new(cx.background_executor.clone());
7632 fs.insert_tree(
7633 path!("/dir"),
7634 json!({
7635 ".git": {},
7636 ".gitignore": "**/target\n/node_modules\n",
7637 "target": {
7638 "index.txt": "index_key:index_value"
7639 },
7640 "node_modules": {
7641 "eslint": {
7642 "index.ts": "const eslint_key = 'eslint value'",
7643 "package.json": r#"{ "some_key": "some value" }"#,
7644 },
7645 "prettier": {
7646 "index.ts": "const prettier_key = 'prettier value'",
7647 "package.json": r#"{ "other_key": "other value" }"#,
7648 },
7649 },
7650 "package.json": r#"{ "main_key": "main value" }"#,
7651 }),
7652 )
7653 .await;
7654 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7655
7656 let query = "key";
7657 assert_eq!(
7658 search(
7659 &project,
7660 SearchQuery::text(
7661 query,
7662 false,
7663 false,
7664 false,
7665 Default::default(),
7666 Default::default(),
7667 false,
7668 None,
7669 )
7670 .unwrap(),
7671 cx
7672 )
7673 .await
7674 .unwrap(),
7675 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7676 "Only one non-ignored file should have the query"
7677 );
7678
7679 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7680 let path_style = PathStyle::local();
7681 assert_eq!(
7682 search(
7683 &project,
7684 SearchQuery::text(
7685 query,
7686 false,
7687 false,
7688 true,
7689 Default::default(),
7690 Default::default(),
7691 false,
7692 None,
7693 )
7694 .unwrap(),
7695 cx
7696 )
7697 .await
7698 .unwrap(),
7699 HashMap::from_iter([
7700 (path!("dir/package.json").to_string(), vec![8..11]),
7701 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7702 (
7703 path!("dir/node_modules/prettier/package.json").to_string(),
7704 vec![9..12]
7705 ),
7706 (
7707 path!("dir/node_modules/prettier/index.ts").to_string(),
7708 vec![15..18]
7709 ),
7710 (
7711 path!("dir/node_modules/eslint/index.ts").to_string(),
7712 vec![13..16]
7713 ),
7714 (
7715 path!("dir/node_modules/eslint/package.json").to_string(),
7716 vec![8..11]
7717 ),
7718 ]),
7719 "Unrestricted search with ignored directories should find every file with the query"
7720 );
7721
7722 let files_to_include =
7723 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7724 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7725 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7726 assert_eq!(
7727 search(
7728 &project,
7729 SearchQuery::text(
7730 query,
7731 false,
7732 false,
7733 true,
7734 files_to_include,
7735 files_to_exclude,
7736 false,
7737 None,
7738 )
7739 .unwrap(),
7740 cx
7741 )
7742 .await
7743 .unwrap(),
7744 HashMap::from_iter([(
7745 path!("dir/node_modules/prettier/package.json").to_string(),
7746 vec![9..12]
7747 )]),
7748 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7749 );
7750}
7751
7752#[gpui::test]
7753async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
7754 init_test(cx);
7755
7756 let fs = FakeFs::new(cx.executor());
7757 fs.insert_tree(
7758 path!("/dir"),
7759 json!({
7760 "one.rs": "// ПРИВЕТ? привет!",
7761 "two.rs": "// ПРИВЕТ.",
7762 "three.rs": "// привет",
7763 }),
7764 )
7765 .await;
7766 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7767 let unicode_case_sensitive_query = SearchQuery::text(
7768 "привет",
7769 false,
7770 true,
7771 false,
7772 Default::default(),
7773 Default::default(),
7774 false,
7775 None,
7776 );
7777 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
7778 assert_eq!(
7779 search(&project, unicode_case_sensitive_query.unwrap(), cx)
7780 .await
7781 .unwrap(),
7782 HashMap::from_iter([
7783 (path!("dir/one.rs").to_string(), vec![17..29]),
7784 (path!("dir/three.rs").to_string(), vec![3..15]),
7785 ])
7786 );
7787
7788 let unicode_case_insensitive_query = SearchQuery::text(
7789 "привет",
7790 false,
7791 false,
7792 false,
7793 Default::default(),
7794 Default::default(),
7795 false,
7796 None,
7797 );
7798 assert_matches!(
7799 unicode_case_insensitive_query,
7800 Ok(SearchQuery::Regex { .. })
7801 );
7802 assert_eq!(
7803 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7804 .await
7805 .unwrap(),
7806 HashMap::from_iter([
7807 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7808 (path!("dir/two.rs").to_string(), vec![3..15]),
7809 (path!("dir/three.rs").to_string(), vec![3..15]),
7810 ])
7811 );
7812
7813 assert_eq!(
7814 search(
7815 &project,
7816 SearchQuery::text(
7817 "привет.",
7818 false,
7819 false,
7820 false,
7821 Default::default(),
7822 Default::default(),
7823 false,
7824 None,
7825 )
7826 .unwrap(),
7827 cx
7828 )
7829 .await
7830 .unwrap(),
7831 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7832 );
7833}
7834
7835#[gpui::test]
7836async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7837 init_test(cx);
7838
7839 let fs = FakeFs::new(cx.executor());
7840 fs.insert_tree(
7841 "/one/two",
7842 json!({
7843 "three": {
7844 "a.txt": "",
7845 "four": {}
7846 },
7847 "c.rs": ""
7848 }),
7849 )
7850 .await;
7851
7852 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7853 project
7854 .update(cx, |project, cx| {
7855 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7856 project.create_entry((id, rel_path("b..")), true, cx)
7857 })
7858 .await
7859 .unwrap()
7860 .into_included()
7861 .unwrap();
7862
7863 assert_eq!(
7864 fs.paths(true),
7865 vec![
7866 PathBuf::from(path!("/")),
7867 PathBuf::from(path!("/one")),
7868 PathBuf::from(path!("/one/two")),
7869 PathBuf::from(path!("/one/two/c.rs")),
7870 PathBuf::from(path!("/one/two/three")),
7871 PathBuf::from(path!("/one/two/three/a.txt")),
7872 PathBuf::from(path!("/one/two/three/b..")),
7873 PathBuf::from(path!("/one/two/three/four")),
7874 ]
7875 );
7876}
7877
7878#[gpui::test]
7879async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7880 init_test(cx);
7881
7882 let fs = FakeFs::new(cx.executor());
7883 fs.insert_tree(
7884 path!("/dir"),
7885 json!({
7886 "a.tsx": "a",
7887 }),
7888 )
7889 .await;
7890
7891 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7892
7893 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7894 language_registry.add(tsx_lang());
7895 let language_server_names = [
7896 "TypeScriptServer",
7897 "TailwindServer",
7898 "ESLintServer",
7899 "NoHoverCapabilitiesServer",
7900 ];
7901 let mut language_servers = [
7902 language_registry.register_fake_lsp(
7903 "tsx",
7904 FakeLspAdapter {
7905 name: language_server_names[0],
7906 capabilities: lsp::ServerCapabilities {
7907 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7908 ..lsp::ServerCapabilities::default()
7909 },
7910 ..FakeLspAdapter::default()
7911 },
7912 ),
7913 language_registry.register_fake_lsp(
7914 "tsx",
7915 FakeLspAdapter {
7916 name: language_server_names[1],
7917 capabilities: lsp::ServerCapabilities {
7918 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7919 ..lsp::ServerCapabilities::default()
7920 },
7921 ..FakeLspAdapter::default()
7922 },
7923 ),
7924 language_registry.register_fake_lsp(
7925 "tsx",
7926 FakeLspAdapter {
7927 name: language_server_names[2],
7928 capabilities: lsp::ServerCapabilities {
7929 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7930 ..lsp::ServerCapabilities::default()
7931 },
7932 ..FakeLspAdapter::default()
7933 },
7934 ),
7935 language_registry.register_fake_lsp(
7936 "tsx",
7937 FakeLspAdapter {
7938 name: language_server_names[3],
7939 capabilities: lsp::ServerCapabilities {
7940 hover_provider: None,
7941 ..lsp::ServerCapabilities::default()
7942 },
7943 ..FakeLspAdapter::default()
7944 },
7945 ),
7946 ];
7947
7948 let (buffer, _handle) = project
7949 .update(cx, |p, cx| {
7950 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7951 })
7952 .await
7953 .unwrap();
7954 cx.executor().run_until_parked();
7955
7956 let mut servers_with_hover_requests = HashMap::default();
7957 for i in 0..language_server_names.len() {
7958 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7959 panic!(
7960 "Failed to get language server #{i} with name {}",
7961 &language_server_names[i]
7962 )
7963 });
7964 let new_server_name = new_server.server.name();
7965 assert!(
7966 !servers_with_hover_requests.contains_key(&new_server_name),
7967 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7968 );
7969 match new_server_name.as_ref() {
7970 "TailwindServer" | "TypeScriptServer" => {
7971 servers_with_hover_requests.insert(
7972 new_server_name.clone(),
7973 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7974 move |_, _| {
7975 let name = new_server_name.clone();
7976 async move {
7977 Ok(Some(lsp::Hover {
7978 contents: lsp::HoverContents::Scalar(
7979 lsp::MarkedString::String(format!("{name} hover")),
7980 ),
7981 range: None,
7982 }))
7983 }
7984 },
7985 ),
7986 );
7987 }
7988 "ESLintServer" => {
7989 servers_with_hover_requests.insert(
7990 new_server_name,
7991 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7992 |_, _| async move { Ok(None) },
7993 ),
7994 );
7995 }
7996 "NoHoverCapabilitiesServer" => {
7997 let _never_handled = new_server
7998 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7999 panic!(
8000 "Should not call for hovers server with no corresponding capabilities"
8001 )
8002 });
8003 }
8004 unexpected => panic!("Unexpected server name: {unexpected}"),
8005 }
8006 }
8007
8008 let hover_task = project.update(cx, |project, cx| {
8009 project.hover(&buffer, Point::new(0, 0), cx)
8010 });
8011 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
8012 |mut hover_request| async move {
8013 hover_request
8014 .next()
8015 .await
8016 .expect("All hover requests should have been triggered")
8017 },
8018 ))
8019 .await;
8020 assert_eq!(
8021 vec!["TailwindServer hover", "TypeScriptServer hover"],
8022 hover_task
8023 .await
8024 .into_iter()
8025 .flatten()
8026 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
8027 .sorted()
8028 .collect::<Vec<_>>(),
8029 "Should receive hover responses from all related servers with hover capabilities"
8030 );
8031}
8032
8033#[gpui::test]
8034async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
8035 init_test(cx);
8036
8037 let fs = FakeFs::new(cx.executor());
8038 fs.insert_tree(
8039 path!("/dir"),
8040 json!({
8041 "a.ts": "a",
8042 }),
8043 )
8044 .await;
8045
8046 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8047
8048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8049 language_registry.add(typescript_lang());
8050 let mut fake_language_servers = language_registry.register_fake_lsp(
8051 "TypeScript",
8052 FakeLspAdapter {
8053 capabilities: lsp::ServerCapabilities {
8054 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
8055 ..lsp::ServerCapabilities::default()
8056 },
8057 ..FakeLspAdapter::default()
8058 },
8059 );
8060
8061 let (buffer, _handle) = project
8062 .update(cx, |p, cx| {
8063 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8064 })
8065 .await
8066 .unwrap();
8067 cx.executor().run_until_parked();
8068
8069 let fake_server = fake_language_servers
8070 .next()
8071 .await
8072 .expect("failed to get the language server");
8073
8074 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
8075 move |_, _| async move {
8076 Ok(Some(lsp::Hover {
8077 contents: lsp::HoverContents::Array(vec![
8078 lsp::MarkedString::String("".to_string()),
8079 lsp::MarkedString::String(" ".to_string()),
8080 lsp::MarkedString::String("\n\n\n".to_string()),
8081 ]),
8082 range: None,
8083 }))
8084 },
8085 );
8086
8087 let hover_task = project.update(cx, |project, cx| {
8088 project.hover(&buffer, Point::new(0, 0), cx)
8089 });
8090 let () = request_handled
8091 .next()
8092 .await
8093 .expect("All hover requests should have been triggered");
8094 assert_eq!(
8095 Vec::<String>::new(),
8096 hover_task
8097 .await
8098 .into_iter()
8099 .flatten()
8100 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
8101 .sorted()
8102 .collect::<Vec<_>>(),
8103 "Empty hover parts should be ignored"
8104 );
8105}
8106
8107#[gpui::test]
8108async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
8109 init_test(cx);
8110
8111 let fs = FakeFs::new(cx.executor());
8112 fs.insert_tree(
8113 path!("/dir"),
8114 json!({
8115 "a.ts": "a",
8116 }),
8117 )
8118 .await;
8119
8120 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8121
8122 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8123 language_registry.add(typescript_lang());
8124 let mut fake_language_servers = language_registry.register_fake_lsp(
8125 "TypeScript",
8126 FakeLspAdapter {
8127 capabilities: lsp::ServerCapabilities {
8128 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8129 ..lsp::ServerCapabilities::default()
8130 },
8131 ..FakeLspAdapter::default()
8132 },
8133 );
8134
8135 let (buffer, _handle) = project
8136 .update(cx, |p, cx| {
8137 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8138 })
8139 .await
8140 .unwrap();
8141 cx.executor().run_until_parked();
8142
8143 let fake_server = fake_language_servers
8144 .next()
8145 .await
8146 .expect("failed to get the language server");
8147
8148 let mut request_handled = fake_server
8149 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
8150 Ok(Some(vec![
8151 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8152 title: "organize imports".to_string(),
8153 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
8154 ..lsp::CodeAction::default()
8155 }),
8156 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8157 title: "fix code".to_string(),
8158 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
8159 ..lsp::CodeAction::default()
8160 }),
8161 ]))
8162 });
8163
8164 let code_actions_task = project.update(cx, |project, cx| {
8165 project.code_actions(
8166 &buffer,
8167 0..buffer.read(cx).len(),
8168 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
8169 cx,
8170 )
8171 });
8172
8173 let () = request_handled
8174 .next()
8175 .await
8176 .expect("The code action request should have been triggered");
8177
8178 let code_actions = code_actions_task.await.unwrap().unwrap();
8179 assert_eq!(code_actions.len(), 1);
8180 assert_eq!(
8181 code_actions[0].lsp_action.action_kind(),
8182 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
8183 );
8184}
8185
#[gpui::test]
async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that a code-action request made without an explicit kind filter
    // omits `context.only` from the LSP request, even when the server declares
    // specific `code_action_kinds` in its capabilities.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises a concrete set of supported action kinds; this
    // must NOT cause the client to echo them back as a `context.only` filter.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        code_action_kinds: Some(vec![
                            CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
                            "source.doc".into(),
                        ]),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The actual check lives inside the handler: it inspects the incoming
    // request's params and fails the test if `context.only` was populated.
    let mut request_handled = fake_server.set_request_handler::<
        lsp::request::CodeActionRequest,
        _,
        _,
    >(move |params, _| async move {
        assert_eq!(
            params.context.only, None,
            "Code action requests without explicit kind filters should not send `context.only`"
        );
        Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
            lsp::CodeAction {
                title: "Add test".to_string(),
                kind: Some("source.addTest".into()),
                ..lsp::CodeAction::default()
            },
        )]))
    });

    // `None` for the requested kinds — the case under test.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // With no filter, the server's arbitrary-kind action flows through as-is.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some("source.addTest".into())
    );
}
8271
8272#[gpui::test]
8273async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8274 init_test(cx);
8275
8276 let fs = FakeFs::new(cx.executor());
8277 fs.insert_tree(
8278 path!("/dir"),
8279 json!({
8280 "a.tsx": "a",
8281 }),
8282 )
8283 .await;
8284
8285 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8286
8287 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8288 language_registry.add(tsx_lang());
8289 let language_server_names = [
8290 "TypeScriptServer",
8291 "TailwindServer",
8292 "ESLintServer",
8293 "NoActionsCapabilitiesServer",
8294 ];
8295
8296 let mut language_server_rxs = [
8297 language_registry.register_fake_lsp(
8298 "tsx",
8299 FakeLspAdapter {
8300 name: language_server_names[0],
8301 capabilities: lsp::ServerCapabilities {
8302 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8303 ..lsp::ServerCapabilities::default()
8304 },
8305 ..FakeLspAdapter::default()
8306 },
8307 ),
8308 language_registry.register_fake_lsp(
8309 "tsx",
8310 FakeLspAdapter {
8311 name: language_server_names[1],
8312 capabilities: lsp::ServerCapabilities {
8313 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8314 ..lsp::ServerCapabilities::default()
8315 },
8316 ..FakeLspAdapter::default()
8317 },
8318 ),
8319 language_registry.register_fake_lsp(
8320 "tsx",
8321 FakeLspAdapter {
8322 name: language_server_names[2],
8323 capabilities: lsp::ServerCapabilities {
8324 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8325 ..lsp::ServerCapabilities::default()
8326 },
8327 ..FakeLspAdapter::default()
8328 },
8329 ),
8330 language_registry.register_fake_lsp(
8331 "tsx",
8332 FakeLspAdapter {
8333 name: language_server_names[3],
8334 capabilities: lsp::ServerCapabilities {
8335 code_action_provider: None,
8336 ..lsp::ServerCapabilities::default()
8337 },
8338 ..FakeLspAdapter::default()
8339 },
8340 ),
8341 ];
8342
8343 let (buffer, _handle) = project
8344 .update(cx, |p, cx| {
8345 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8346 })
8347 .await
8348 .unwrap();
8349 cx.executor().run_until_parked();
8350
8351 let mut servers_with_actions_requests = HashMap::default();
8352 for i in 0..language_server_names.len() {
8353 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8354 panic!(
8355 "Failed to get language server #{i} with name {}",
8356 &language_server_names[i]
8357 )
8358 });
8359 let new_server_name = new_server.server.name();
8360
8361 assert!(
8362 !servers_with_actions_requests.contains_key(&new_server_name),
8363 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8364 );
8365 match new_server_name.0.as_ref() {
8366 "TailwindServer" | "TypeScriptServer" => {
8367 servers_with_actions_requests.insert(
8368 new_server_name.clone(),
8369 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8370 move |_, _| {
8371 let name = new_server_name.clone();
8372 async move {
8373 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8374 lsp::CodeAction {
8375 title: format!("{name} code action"),
8376 ..lsp::CodeAction::default()
8377 },
8378 )]))
8379 }
8380 },
8381 ),
8382 );
8383 }
8384 "ESLintServer" => {
8385 servers_with_actions_requests.insert(
8386 new_server_name,
8387 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8388 |_, _| async move { Ok(None) },
8389 ),
8390 );
8391 }
8392 "NoActionsCapabilitiesServer" => {
8393 let _never_handled = new_server
8394 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8395 panic!(
8396 "Should not call for code actions server with no corresponding capabilities"
8397 )
8398 });
8399 }
8400 unexpected => panic!("Unexpected server name: {unexpected}"),
8401 }
8402 }
8403
8404 let code_actions_task = project.update(cx, |project, cx| {
8405 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8406 });
8407
8408 // cx.run_until_parked();
8409 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8410 |mut code_actions_request| async move {
8411 code_actions_request
8412 .next()
8413 .await
8414 .expect("All code actions requests should have been triggered")
8415 },
8416 ))
8417 .await;
8418 assert_eq!(
8419 vec!["TailwindServer code action", "TypeScriptServer code action"],
8420 code_actions_task
8421 .await
8422 .unwrap()
8423 .unwrap()
8424 .into_iter()
8425 .map(|code_action| code_action.lsp_action.title().to_owned())
8426 .sorted()
8427 .collect::<Vec<_>>(),
8428 "Should receive code actions responses from all related servers with hover capabilities"
8429 );
8430}
8431
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    // Exercises `Project::move_worktree` by repeatedly moving worktrees
    // forward and backward and asserting the resulting visible order.
    // NOTE: the `worktree_a/b/c` handles always refer to the same worktrees
    // (a.rs, b.rs, c.rs) regardless of their current position in the list;
    // the `first`/`second`/`third` locals inside each step name positions.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    // (worktree_a is currently at index 1, worktree_b at index 0)
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    // (worktree_b is currently at index 2, worktree_c at index 1)
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    // (worktree_a is currently at index 2, worktree_b at index 0)
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
8629
8630#[gpui::test]
8631async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8632 init_test(cx);
8633
8634 let staged_contents = r#"
8635 fn main() {
8636 println!("hello world");
8637 }
8638 "#
8639 .unindent();
8640 let file_contents = r#"
8641 // print goodbye
8642 fn main() {
8643 println!("goodbye world");
8644 }
8645 "#
8646 .unindent();
8647
8648 let fs = FakeFs::new(cx.background_executor.clone());
8649 fs.insert_tree(
8650 "/dir",
8651 json!({
8652 ".git": {},
8653 "src": {
8654 "main.rs": file_contents,
8655 }
8656 }),
8657 )
8658 .await;
8659
8660 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8661
8662 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8663
8664 let buffer = project
8665 .update(cx, |project, cx| {
8666 project.open_local_buffer("/dir/src/main.rs", cx)
8667 })
8668 .await
8669 .unwrap();
8670 let unstaged_diff = project
8671 .update(cx, |project, cx| {
8672 project.open_unstaged_diff(buffer.clone(), cx)
8673 })
8674 .await
8675 .unwrap();
8676
8677 cx.run_until_parked();
8678 unstaged_diff.update(cx, |unstaged_diff, cx| {
8679 let snapshot = buffer.read(cx).snapshot();
8680 assert_hunks(
8681 unstaged_diff.snapshot(cx).hunks(&snapshot),
8682 &snapshot,
8683 &unstaged_diff.base_text_string(cx).unwrap(),
8684 &[
8685 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8686 (
8687 2..3,
8688 " println!(\"hello world\");\n",
8689 " println!(\"goodbye world\");\n",
8690 DiffHunkStatus::modified_none(),
8691 ),
8692 ],
8693 );
8694 });
8695
8696 let staged_contents = r#"
8697 // print goodbye
8698 fn main() {
8699 }
8700 "#
8701 .unindent();
8702
8703 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8704
8705 cx.run_until_parked();
8706 unstaged_diff.update(cx, |unstaged_diff, cx| {
8707 let snapshot = buffer.read(cx).snapshot();
8708 assert_hunks(
8709 unstaged_diff.snapshot(cx).hunks_intersecting_range(
8710 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
8711 &snapshot,
8712 ),
8713 &snapshot,
8714 &unstaged_diff.base_text(cx).text(),
8715 &[(
8716 2..3,
8717 "",
8718 " println!(\"goodbye world\");\n",
8719 DiffHunkStatus::added_none(),
8720 )],
8721 );
8722 });
8723}
8724
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Exercises the uncommitted diff (buffer vs. HEAD): the primary hunk
    // status reflects HEAD, while the secondary status reflects whether the
    // same change also exists relative to the index (i.e. is unstaged).
    init_test(cx);

    // Three versions of the file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both also contain `deletion.rs`, which is absent from
    // the working tree — it will be opened later as a deleted file.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the language of the buffer.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is staged-but-uncommitted (secondary hunk present);
    // the println change matches the index, so it has no secondary status.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the secondary hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    // (rewriting the index without `deletion.rs` stages its removal).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk loses its secondary (unstaged) status.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8916
8917#[gpui::test]
8918async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
8919 use DiffHunkSecondaryStatus::*;
8920 init_test(cx);
8921
8922 let committed_contents = r#"
8923 zero
8924 one
8925 two
8926 three
8927 four
8928 five
8929 "#
8930 .unindent();
8931 let file_contents = r#"
8932 one
8933 TWO
8934 three
8935 FOUR
8936 five
8937 "#
8938 .unindent();
8939
8940 let fs = FakeFs::new(cx.background_executor.clone());
8941 fs.insert_tree(
8942 "/dir",
8943 json!({
8944 ".git": {},
8945 "file.txt": file_contents.clone()
8946 }),
8947 )
8948 .await;
8949
8950 fs.set_head_and_index_for_repo(
8951 path!("/dir/.git").as_ref(),
8952 &[("file.txt", committed_contents.clone())],
8953 );
8954
8955 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8956
8957 let buffer = project
8958 .update(cx, |project, cx| {
8959 project.open_local_buffer("/dir/file.txt", cx)
8960 })
8961 .await
8962 .unwrap();
8963 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8964 let uncommitted_diff = project
8965 .update(cx, |project, cx| {
8966 project.open_uncommitted_diff(buffer.clone(), cx)
8967 })
8968 .await
8969 .unwrap();
8970 let mut diff_events = cx.events(&uncommitted_diff);
8971
8972 // The hunks are initially unstaged.
8973 uncommitted_diff.read_with(cx, |diff, cx| {
8974 assert_hunks(
8975 diff.snapshot(cx).hunks(&snapshot),
8976 &snapshot,
8977 &diff.base_text_string(cx).unwrap(),
8978 &[
8979 (
8980 0..0,
8981 "zero\n",
8982 "",
8983 DiffHunkStatus::deleted(HasSecondaryHunk),
8984 ),
8985 (
8986 1..2,
8987 "two\n",
8988 "TWO\n",
8989 DiffHunkStatus::modified(HasSecondaryHunk),
8990 ),
8991 (
8992 3..4,
8993 "four\n",
8994 "FOUR\n",
8995 DiffHunkStatus::modified(HasSecondaryHunk),
8996 ),
8997 ],
8998 );
8999 });
9000
9001 // Stage a hunk. It appears as optimistically staged.
9002 uncommitted_diff.update(cx, |diff, cx| {
9003 let range =
9004 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
9005 let hunks = diff
9006 .snapshot(cx)
9007 .hunks_intersecting_range(range, &snapshot)
9008 .collect::<Vec<_>>();
9009 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
9010
9011 assert_hunks(
9012 diff.snapshot(cx).hunks(&snapshot),
9013 &snapshot,
9014 &diff.base_text_string(cx).unwrap(),
9015 &[
9016 (
9017 0..0,
9018 "zero\n",
9019 "",
9020 DiffHunkStatus::deleted(HasSecondaryHunk),
9021 ),
9022 (
9023 1..2,
9024 "two\n",
9025 "TWO\n",
9026 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9027 ),
9028 (
9029 3..4,
9030 "four\n",
9031 "FOUR\n",
9032 DiffHunkStatus::modified(HasSecondaryHunk),
9033 ),
9034 ],
9035 );
9036 });
9037
9038 // The diff emits a change event for the range of the staged hunk.
9039 assert!(matches!(
9040 diff_events.next().await.unwrap(),
9041 BufferDiffEvent::HunksStagedOrUnstaged(_)
9042 ));
9043 let event = diff_events.next().await.unwrap();
9044 if let BufferDiffEvent::DiffChanged(DiffChanged {
9045 changed_range: Some(changed_range),
9046 base_text_changed_range: _,
9047 extended_range: _,
9048 }) = event
9049 {
9050 let changed_range = changed_range.to_point(&snapshot);
9051 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
9052 } else {
9053 panic!("Unexpected event {event:?}");
9054 }
9055
9056 // When the write to the index completes, it appears as staged.
9057 cx.run_until_parked();
9058 uncommitted_diff.update(cx, |diff, cx| {
9059 assert_hunks(
9060 diff.snapshot(cx).hunks(&snapshot),
9061 &snapshot,
9062 &diff.base_text_string(cx).unwrap(),
9063 &[
9064 (
9065 0..0,
9066 "zero\n",
9067 "",
9068 DiffHunkStatus::deleted(HasSecondaryHunk),
9069 ),
9070 (
9071 1..2,
9072 "two\n",
9073 "TWO\n",
9074 DiffHunkStatus::modified(NoSecondaryHunk),
9075 ),
9076 (
9077 3..4,
9078 "four\n",
9079 "FOUR\n",
9080 DiffHunkStatus::modified(HasSecondaryHunk),
9081 ),
9082 ],
9083 );
9084 });
9085
9086 // The diff emits a change event for the changed index text.
9087 let event = diff_events.next().await.unwrap();
9088 if let BufferDiffEvent::DiffChanged(DiffChanged {
9089 changed_range: Some(changed_range),
9090 base_text_changed_range: _,
9091 extended_range: _,
9092 }) = event
9093 {
9094 let changed_range = changed_range.to_point(&snapshot);
9095 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
9096 } else {
9097 panic!("Unexpected event {event:?}");
9098 }
9099
9100 // Simulate a problem writing to the git index.
9101 fs.set_error_message_for_index_write(
9102 "/dir/.git".as_ref(),
9103 Some("failed to write git index".into()),
9104 );
9105
9106 // Stage another hunk.
9107 uncommitted_diff.update(cx, |diff, cx| {
9108 let range =
9109 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
9110 let hunks = diff
9111 .snapshot(cx)
9112 .hunks_intersecting_range(range, &snapshot)
9113 .collect::<Vec<_>>();
9114 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
9115
9116 assert_hunks(
9117 diff.snapshot(cx).hunks(&snapshot),
9118 &snapshot,
9119 &diff.base_text_string(cx).unwrap(),
9120 &[
9121 (
9122 0..0,
9123 "zero\n",
9124 "",
9125 DiffHunkStatus::deleted(HasSecondaryHunk),
9126 ),
9127 (
9128 1..2,
9129 "two\n",
9130 "TWO\n",
9131 DiffHunkStatus::modified(NoSecondaryHunk),
9132 ),
9133 (
9134 3..4,
9135 "four\n",
9136 "FOUR\n",
9137 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9138 ),
9139 ],
9140 );
9141 });
9142 assert!(matches!(
9143 diff_events.next().await.unwrap(),
9144 BufferDiffEvent::HunksStagedOrUnstaged(_)
9145 ));
9146 let event = diff_events.next().await.unwrap();
9147 if let BufferDiffEvent::DiffChanged(DiffChanged {
9148 changed_range: Some(changed_range),
9149 base_text_changed_range: _,
9150 extended_range: _,
9151 }) = event
9152 {
9153 let changed_range = changed_range.to_point(&snapshot);
9154 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
9155 } else {
9156 panic!("Unexpected event {event:?}");
9157 }
9158
9159 // When the write fails, the hunk returns to being unstaged.
9160 cx.run_until_parked();
9161 uncommitted_diff.update(cx, |diff, cx| {
9162 assert_hunks(
9163 diff.snapshot(cx).hunks(&snapshot),
9164 &snapshot,
9165 &diff.base_text_string(cx).unwrap(),
9166 &[
9167 (
9168 0..0,
9169 "zero\n",
9170 "",
9171 DiffHunkStatus::deleted(HasSecondaryHunk),
9172 ),
9173 (
9174 1..2,
9175 "two\n",
9176 "TWO\n",
9177 DiffHunkStatus::modified(NoSecondaryHunk),
9178 ),
9179 (
9180 3..4,
9181 "four\n",
9182 "FOUR\n",
9183 DiffHunkStatus::modified(HasSecondaryHunk),
9184 ),
9185 ],
9186 );
9187 });
9188
9189 let event = diff_events.next().await.unwrap();
9190 if let BufferDiffEvent::DiffChanged(DiffChanged {
9191 changed_range: Some(changed_range),
9192 base_text_changed_range: _,
9193 extended_range: _,
9194 }) = event
9195 {
9196 let changed_range = changed_range.to_point(&snapshot);
9197 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
9198 } else {
9199 panic!("Unexpected event {event:?}");
9200 }
9201
9202 // Allow writing to the git index to succeed again.
9203 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
9204
9205 // Stage two hunks with separate operations.
9206 uncommitted_diff.update(cx, |diff, cx| {
9207 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
9208 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
9209 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
9210 });
9211
9212 // Both staged hunks appear as pending.
9213 uncommitted_diff.update(cx, |diff, cx| {
9214 assert_hunks(
9215 diff.snapshot(cx).hunks(&snapshot),
9216 &snapshot,
9217 &diff.base_text_string(cx).unwrap(),
9218 &[
9219 (
9220 0..0,
9221 "zero\n",
9222 "",
9223 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9224 ),
9225 (
9226 1..2,
9227 "two\n",
9228 "TWO\n",
9229 DiffHunkStatus::modified(NoSecondaryHunk),
9230 ),
9231 (
9232 3..4,
9233 "four\n",
9234 "FOUR\n",
9235 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9236 ),
9237 ],
9238 );
9239 });
9240
9241 // Both staging operations take effect.
9242 cx.run_until_parked();
9243 uncommitted_diff.update(cx, |diff, cx| {
9244 assert_hunks(
9245 diff.snapshot(cx).hunks(&snapshot),
9246 &snapshot,
9247 &diff.base_text_string(cx).unwrap(),
9248 &[
9249 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
9250 (
9251 1..2,
9252 "two\n",
9253 "TWO\n",
9254 DiffHunkStatus::modified(NoSecondaryHunk),
9255 ),
9256 (
9257 3..4,
9258 "four\n",
9259 "FOUR\n",
9260 DiffHunkStatus::modified(NoSecondaryHunk),
9261 ),
9262 ],
9263 );
9264 });
9265}
9266
9267#[gpui::test(iterations = 10)]
9268async fn test_uncommitted_diff_opened_before_unstaged_diff(cx: &mut gpui::TestAppContext) {
9269 use DiffHunkSecondaryStatus::*;
9270 init_test(cx);
9271
9272 let committed_contents = "one\ntwo\nthree\n";
9273 let file_contents = "one\nTWO\nthree\n";
9274
9275 let fs = FakeFs::new(cx.background_executor.clone());
9276 fs.insert_tree(
9277 "/dir",
9278 json!({
9279 ".git": {},
9280 "file.txt": file_contents,
9281 }),
9282 )
9283 .await;
9284 fs.set_head_and_index_for_repo(
9285 path!("/dir/.git").as_ref(),
9286 &[("file.txt", committed_contents.into())],
9287 );
9288
9289 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9290 let buffer = project
9291 .update(cx, |project, cx| {
9292 project.open_local_buffer("/dir/file.txt", cx)
9293 })
9294 .await
9295 .unwrap();
9296
9297 let uncommitted_diff_task = project.update(cx, |project, cx| {
9298 project.open_uncommitted_diff(buffer.clone(), cx)
9299 });
9300 let unstaged_diff_task = project.update(cx, |project, cx| {
9301 project.open_unstaged_diff(buffer.clone(), cx)
9302 });
9303 let (uncommitted_diff, _unstaged_diff) =
9304 futures::future::join(uncommitted_diff_task, unstaged_diff_task).await;
9305 let uncommitted_diff = uncommitted_diff.unwrap();
9306 let _unstaged_diff = _unstaged_diff.unwrap();
9307
9308 cx.run_until_parked();
9309
9310 uncommitted_diff.read_with(cx, |diff, cx| {
9311 let snapshot = buffer.read(cx).snapshot();
9312 assert_hunks(
9313 diff.snapshot(cx).hunks_intersecting_range(
9314 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
9315 &snapshot,
9316 ),
9317 &snapshot,
9318 &diff.base_text_string(cx).unwrap(),
9319 &[(
9320 1..2,
9321 "two\n",
9322 "TWO\n",
9323 DiffHunkStatus::modified(HasSecondaryHunk),
9324 )],
9325 );
9326 });
9327}
9328
/// Stages hunks while the fake filesystem's change events are paused, so
/// index writes and their FS notifications arrive out of order relative to
/// subsequent staging requests. Pending hunk statuses must still converge.
// NOTE(review): the fixed seeds look chosen to reproduce a specific
// event-ordering failure — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the index is set to the same text below, so every
    // working-copy change starts out unstaged.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: deletes "zero", uppercases "two" and "four" — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes complete without their FS notifications
    // being delivered yet.
    fs.pause_events();

    // Stage the first hunk. Its status becomes pending-removal immediately
    // (optimistic update), before any FS event arrives.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending; the third is untouched.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged: no pending statuses survive once every index
    // write and FS event has been processed.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9522
/// Randomized test: repeatedly stage/unstage random hunks with random yields
/// in between, then check that once the executor is parked every hunk's
/// secondary status settled to the last operation requested for it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as our model of the expected state; we mutate its
    // secondary statuses alongside the real diff.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage it if it's unstaged, unstage it otherwise,
        // and record the corresponding optimistic pending status in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave the operations with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status in the model resolves to
    // its terminal counterpart.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The real diff must agree with the model, hunk for hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9642
9643#[gpui::test]
9644async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9645 init_test(cx);
9646
9647 let committed_contents = r#"
9648 fn main() {
9649 println!("hello from HEAD");
9650 }
9651 "#
9652 .unindent();
9653 let file_contents = r#"
9654 fn main() {
9655 println!("hello from the working copy");
9656 }
9657 "#
9658 .unindent();
9659
9660 let fs = FakeFs::new(cx.background_executor.clone());
9661 fs.insert_tree(
9662 "/dir",
9663 json!({
9664 ".git": {},
9665 "src": {
9666 "main.rs": file_contents,
9667 }
9668 }),
9669 )
9670 .await;
9671
9672 fs.set_head_for_repo(
9673 Path::new("/dir/.git"),
9674 &[("src/main.rs", committed_contents.clone())],
9675 "deadbeef",
9676 );
9677 fs.set_index_for_repo(
9678 Path::new("/dir/.git"),
9679 &[("src/main.rs", committed_contents.clone())],
9680 );
9681
9682 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9683
9684 let buffer = project
9685 .update(cx, |project, cx| {
9686 project.open_local_buffer("/dir/src/main.rs", cx)
9687 })
9688 .await
9689 .unwrap();
9690 let uncommitted_diff = project
9691 .update(cx, |project, cx| {
9692 project.open_uncommitted_diff(buffer.clone(), cx)
9693 })
9694 .await
9695 .unwrap();
9696
9697 cx.run_until_parked();
9698 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9699 let snapshot = buffer.read(cx).snapshot();
9700 assert_hunks(
9701 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9702 &snapshot,
9703 &uncommitted_diff.base_text_string(cx).unwrap(),
9704 &[(
9705 1..2,
9706 " println!(\"hello from HEAD\");\n",
9707 " println!(\"hello from the working copy\");\n",
9708 DiffHunkStatus {
9709 kind: DiffHunkStatusKind::Modified,
9710 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9711 },
9712 )],
9713 );
9714 });
9715}
9716
/// Verifies that staging a hunk does not clobber the file's executable bit in
/// the git index. Uses a real git repository and the real filesystem.
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real git subprocesses block, so the test executor must allow parking.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with mode 0755, then modify its contents in the working
    // copy so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which rewrites the index entry for `foo`.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged diff reports a mode change.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Cross-check via `git ls-files -s`, which prints each index entry's mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9801
9802#[gpui::test]
9803async fn test_repository_and_path_for_project_path(
9804 background_executor: BackgroundExecutor,
9805 cx: &mut gpui::TestAppContext,
9806) {
9807 init_test(cx);
9808 let fs = FakeFs::new(background_executor);
9809 fs.insert_tree(
9810 path!("/root"),
9811 json!({
9812 "c.txt": "",
9813 "dir1": {
9814 ".git": {},
9815 "deps": {
9816 "dep1": {
9817 ".git": {},
9818 "src": {
9819 "a.txt": ""
9820 }
9821 }
9822 },
9823 "src": {
9824 "b.txt": ""
9825 }
9826 },
9827 }),
9828 )
9829 .await;
9830
9831 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9832 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9833 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9834 project
9835 .update(cx, |project, cx| project.git_scans_complete(cx))
9836 .await;
9837 cx.run_until_parked();
9838
9839 project.read_with(cx, |project, cx| {
9840 let git_store = project.git_store().read(cx);
9841 let pairs = [
9842 ("c.txt", None),
9843 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9844 (
9845 "dir1/deps/dep1/src/a.txt",
9846 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9847 ),
9848 ];
9849 let expected = pairs
9850 .iter()
9851 .map(|(path, result)| {
9852 (
9853 path,
9854 result.map(|(repo, repo_path)| {
9855 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9856 }),
9857 )
9858 })
9859 .collect::<Vec<_>>();
9860 let actual = pairs
9861 .iter()
9862 .map(|(path, _)| {
9863 let project_path = (tree_id, rel_path(path)).into();
9864 let result = maybe!({
9865 let (repo, repo_path) =
9866 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9867 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9868 });
9869 (path, result)
9870 })
9871 .collect::<Vec<_>>();
9872 pretty_assertions::assert_eq!(expected, actual);
9873 });
9874
9875 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9876 .await
9877 .unwrap();
9878 cx.run_until_parked();
9879
9880 project.read_with(cx, |project, cx| {
9881 let git_store = project.git_store().read(cx);
9882 assert_eq!(
9883 git_store.repository_and_path_for_project_path(
9884 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9885 cx
9886 ),
9887 None
9888 );
9889 });
9890}
9891
9892#[gpui::test]
9893async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9894 init_test(cx);
9895 let fs = FakeFs::new(cx.background_executor.clone());
9896 let home = paths::home_dir();
9897 fs.insert_tree(
9898 home,
9899 json!({
9900 ".git": {},
9901 "project": {
9902 "a.txt": "A"
9903 },
9904 }),
9905 )
9906 .await;
9907
9908 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9909 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9910 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9911
9912 project
9913 .update(cx, |project, cx| project.git_scans_complete(cx))
9914 .await;
9915 tree.flush_fs_events(cx).await;
9916
9917 project.read_with(cx, |project, cx| {
9918 let containing = project
9919 .git_store()
9920 .read(cx)
9921 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9922 assert!(containing.is_none());
9923 });
9924
9925 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9926 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9927 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9928 project
9929 .update(cx, |project, cx| project.git_scans_complete(cx))
9930 .await;
9931 tree.flush_fs_events(cx).await;
9932
9933 project.read_with(cx, |project, cx| {
9934 let containing = project
9935 .git_store()
9936 .read(cx)
9937 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9938 assert_eq!(
9939 containing
9940 .unwrap()
9941 .0
9942 .read(cx)
9943 .work_directory_abs_path
9944 .as_ref(),
9945 home,
9946 );
9947 });
9948}
9949
/// End-to-end check of cached repository status against a real git repository
/// on the real filesystem, across modifications, commits, and deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git subprocesses block, so the test executor must allow parking.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses advertised above: delete d.txt, modify a.txt.
    // b.txt was never added, so it remains untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit all outstanding changes (and remove d.txt from the index), then
    // delete a tracked file and an untracked file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
10105
/// Verifies status postprocessing: nested repositories are excluded from the
/// outer repo's statuses, and index-deleted/worktree-present files get a
/// combined status.
// NOTE(review): this test is `#[ignore]`d and the reason isn't recorded here —
// confirm why before re-enabling.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git subprocesses block, so the test executor must allow parking.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
10170
10171#[track_caller]
10172/// We merge lhs into rhs.
10173fn merge_pending_ops_snapshots(
10174 source: Vec<pending_op::PendingOps>,
10175 mut target: Vec<pending_op::PendingOps>,
10176) -> Vec<pending_op::PendingOps> {
10177 for s_ops in source {
10178 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
10179 if ops.repo_path == s_ops.repo_path {
10180 Some(idx)
10181 } else {
10182 None
10183 }
10184 }) {
10185 let t_ops = &mut target[idx];
10186 for s_op in s_ops.ops {
10187 if let Some(op_idx) = t_ops
10188 .ops
10189 .iter()
10190 .zip(0..)
10191 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10192 {
10193 let t_op = &mut t_ops.ops[op_idx];
10194 match (s_op.job_status, t_op.job_status) {
10195 (pending_op::JobStatus::Running, _) => {}
10196 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10197 (s_st, t_st) if s_st == t_st => {}
10198 _ => unreachable!(),
10199 }
10200 } else {
10201 t_ops.ops.push(s_op);
10202 }
10203 }
10204 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10205 } else {
10206 target.push(s_ops);
10207 }
10208 }
10209 target
10210}
10211
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Repeatedly stages/unstages a single untracked file and checks that every
    // request is tracked as a pending op: `Running` while the git job is in
    // flight, `Finished` once the awaited task resolves. Also verifies the
    // accumulated PendingOpsChanged event history and the final cached status.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    // `a.txt` starts out untracked, so staging it is a real index change.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot into one merged SumTree so
    // the full op history can be asserted on at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; the assertions below show ids are
    // assigned sequentially starting at 1.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the newest op is `Running`
    // immediately after the request, then `Finished` once the task resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op must be recorded (and Running) before the task is awaited.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate stage/unstage five times; each request gets its own op id.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history must contain all five ops in id order, all
    // Finished, alternating Staged/Unstaged.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final request was a stage, so the file ends up added in the index
    // and unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10376
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Issues two identical stage requests for the same path back to back. The
    // expected op history below shows the first request ends up `Skipped` and
    // only the second actually runs to `Finished` — presumably the second
    // request supersedes the still-queued first one (verify against the
    // pending-op implementation).
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot so the complete op history
    // can be asserted on after both requests settle.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: fire and forget.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second, identical request: awaited (with a timeout so a hang fails the
    // test rather than blocking it forever).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 (the detached request) is Skipped; op 2 ran to completion.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Net effect is a single successful stage: added in the index, unmodified
    // in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10486
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Stages one file individually, then runs stage_all and unstage_all over a
    // repo with two untracked files, and checks the per-path pending-op
    // history plus the final cached statuses.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    // Both files begin untracked.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every PendingOpsChanged snapshot into one merged tree so the
    // full per-path op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt on its own, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 is the individual stage, op 2 the unstage_all. Note that
    // stage_all produced no extra op for a.txt — presumably because it was
    // already staged at that point (ids are per path).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 comes from stage_all, op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the final unstage_all both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10617
10618#[gpui::test]
10619async fn test_repository_subfolder_git_status(
10620 executor: gpui::BackgroundExecutor,
10621 cx: &mut gpui::TestAppContext,
10622) {
10623 init_test(cx);
10624
10625 let fs = FakeFs::new(executor);
10626 fs.insert_tree(
10627 path!("/root"),
10628 json!({
10629 "my-repo": {
10630 ".git": {},
10631 "a.txt": "a",
10632 "sub-folder-1": {
10633 "sub-folder-2": {
10634 "c.txt": "cc",
10635 "d": {
10636 "e.txt": "eee"
10637 }
10638 },
10639 }
10640 },
10641 }),
10642 )
10643 .await;
10644
10645 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
10646 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
10647
10648 fs.set_status_for_repo(
10649 path!("/root/my-repo/.git").as_ref(),
10650 &[(E_TXT, FileStatus::Untracked)],
10651 );
10652
10653 let project = Project::test(
10654 fs.clone(),
10655 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
10656 cx,
10657 )
10658 .await;
10659
10660 project
10661 .update(cx, |project, cx| project.git_scans_complete(cx))
10662 .await;
10663 cx.run_until_parked();
10664
10665 let repository = project.read_with(cx, |project, cx| {
10666 project.repositories(cx).values().next().unwrap().clone()
10667 });
10668
10669 // Ensure that the git status is loaded correctly
10670 repository.read_with(cx, |repository, _cx| {
10671 assert_eq!(
10672 repository.work_directory_abs_path,
10673 Path::new(path!("/root/my-repo")).into()
10674 );
10675
10676 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10677 assert_eq!(
10678 repository
10679 .status_for_path(&repo_path(E_TXT))
10680 .unwrap()
10681 .status,
10682 FileStatus::Untracked
10683 );
10684 });
10685
10686 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
10687 project
10688 .update(cx, |project, cx| project.git_scans_complete(cx))
10689 .await;
10690 cx.run_until_parked();
10691
10692 repository.read_with(cx, |repository, _cx| {
10693 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10694 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
10695 });
10696}
10697
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): `#[cfg(any())]` below compiles this test out entirely until the
// flakiness is addressed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Exercises conflict detection: cherry-picking a commit that conflicts
    // with HEAD should surface the path in `merge_conflicts`, and resolving
    // the cherry-pick should clear it again.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on a branch, then change the same file
    // differently on main and cherry-pick the branch commit onto it.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution the conflict list should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10780
10781#[gpui::test]
10782async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
10783 init_test(cx);
10784 let fs = FakeFs::new(cx.background_executor.clone());
10785 fs.insert_tree(
10786 path!("/root"),
10787 json!({
10788 ".git": {},
10789 ".gitignore": "*.txt\n",
10790 "a.xml": "<a></a>",
10791 "b.txt": "Some text"
10792 }),
10793 )
10794 .await;
10795
10796 fs.set_head_and_index_for_repo(
10797 path!("/root/.git").as_ref(),
10798 &[
10799 (".gitignore", "*.txt\n".into()),
10800 ("a.xml", "<a></a>".into()),
10801 ],
10802 );
10803
10804 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10805
10806 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10807 tree.flush_fs_events(cx).await;
10808 project
10809 .update(cx, |project, cx| project.git_scans_complete(cx))
10810 .await;
10811 cx.executor().run_until_parked();
10812
10813 let repository = project.read_with(cx, |project, cx| {
10814 project.repositories(cx).values().next().unwrap().clone()
10815 });
10816
10817 // One file is unmodified, the other is ignored.
10818 cx.read(|cx| {
10819 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
10820 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
10821 });
10822
10823 // Change the gitignore, and stage the newly non-ignored file.
10824 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
10825 .await
10826 .unwrap();
10827 fs.set_index_for_repo(
10828 Path::new(path!("/root/.git")),
10829 &[
10830 (".gitignore", "*.txt\n".into()),
10831 ("a.xml", "<a></a>".into()),
10832 ("b.txt", "Some text".into()),
10833 ],
10834 );
10835
10836 cx.executor().run_until_parked();
10837 cx.read(|cx| {
10838 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
10839 assert_entry_git_state(
10840 tree.read(cx),
10841 repository.read(cx),
10842 "b.txt",
10843 Some(StatusCode::Added),
10844 false,
10845 );
10846 });
10847}
10848
// NOTE:
// This test always fails on Windows because, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10855#[gpui::test]
10856#[cfg_attr(target_os = "windows", ignore)]
10857async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
10858 init_test(cx);
10859 cx.executor().allow_parking();
10860 let root = TempTree::new(json!({
10861 "projects": {
10862 "project1": {
10863 "a": "",
10864 "b": "",
10865 }
10866 },
10867
10868 }));
10869 let root_path = root.path();
10870
10871 let repo = git_init(&root_path.join("projects/project1"));
10872 git_add("a", &repo);
10873 git_commit("init", &repo);
10874 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
10875
10876 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10877
10878 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10879 tree.flush_fs_events(cx).await;
10880 project
10881 .update(cx, |project, cx| project.git_scans_complete(cx))
10882 .await;
10883 cx.executor().run_until_parked();
10884
10885 let repository = project.read_with(cx, |project, cx| {
10886 project.repositories(cx).values().next().unwrap().clone()
10887 });
10888
10889 repository.read_with(cx, |repository, _| {
10890 assert_eq!(
10891 repository.work_directory_abs_path.as_ref(),
10892 root_path.join("projects/project1").as_path()
10893 );
10894 assert_eq!(
10895 repository
10896 .status_for_path(&repo_path("a"))
10897 .map(|entry| entry.status),
10898 Some(StatusCode::Modified.worktree()),
10899 );
10900 assert_eq!(
10901 repository
10902 .status_for_path(&repo_path("b"))
10903 .map(|entry| entry.status),
10904 Some(FileStatus::Untracked),
10905 );
10906 });
10907
10908 std::fs::rename(
10909 root_path.join("projects/project1"),
10910 root_path.join("projects/project2"),
10911 )
10912 .unwrap();
10913 tree.flush_fs_events(cx).await;
10914
10915 repository.read_with(cx, |repository, _| {
10916 assert_eq!(
10917 repository.work_directory_abs_path.as_ref(),
10918 root_path.join("projects/project2").as_path()
10919 );
10920 assert_eq!(
10921 repository.status_for_path(&repo_path("a")).unwrap().status,
10922 StatusCode::Modified.worktree(),
10923 );
10924 assert_eq!(
10925 repository.status_for_path(&repo_path("b")).unwrap().status,
10926 FileStatus::Untracked,
10927 );
10928 });
10929}
10930
// NOTE: This test always fails on Windows because, unlike on Unix, you can't
// rename a directory that some program already has open. This is a limitation
// of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status tracking against a real git repository: initial scan,
    // working-copy edits, commits, reset/stash, ignored files, deletions, and
    // directory renames must all be reflected in `status_for_path`.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    // `mut` because `git_stash` below takes the repo mutably.
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files report no status at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed away, so it's clean again; b.txt was removed from
        // the index, so it's untracked; e.txt was edited on disk.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files, and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new file inside a nested directory; it should be untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the nested file's status should follow the
    // new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
11155
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    // Verifies that churn inside a gitignored directory (creating/removing a
    // `target/debug/deps` dir and a temp file) produces no repository update
    // events, and only the expected worktree entry updates.
    // NOTE(review): currently `#[ignore]`d — presumably timing-sensitive like
    // the sibling test below; confirm before re-enabling.
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every RepositoryUpdated event and every worktree entry update so
    // the test can assert exactly which notifications fired.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-harness noise, not a real update.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces that subtree to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate flycheck-like churn entirely inside the ignored directory:
    // create deps/, drop a temp file into it, then remove it all.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11314
// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
// to different timings/ordering of events.
/// Regression test: FS events fired inside gitignored directories (the way a
/// flycheck run touches `target/debug/deps`) must not produce
/// `RepositoryUpdated` events or worktree-entry project events.
#[ignore]
#[gpui::test]
async fn test_odd_events_for_ignored_dirs(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "**/target/",
            "src": {
                "main.rs": "fn main() {}",
            },
            "target": {
                "debug": {
                    "foo.txt": "foo",
                    "deps": {}
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "**/target/".into()),
            ("src/main.rs", "fn main() {}".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    // Record every repository update and every worktree-entry update so the
    // assertions below can check exactly which events fired.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repository_updates = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repository_updates.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The fake FS emits a sentinel entry when flushing
                        // events; it is not part of the tree under test.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Loading an ignored file forces the worktree to scan into `target/debug`.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("target/debug/foo.txt"), cx)
    })
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("src"), false),
                (rel_path("src/main.rs"), false),
                (rel_path("target"), true),
                (rel_path("target/debug"), true),
                (rel_path("target/debug/deps"), true),
                (rel_path("target/debug/foo.txt"), true),
            ]
        );
    });

    // Drain the events produced by the initial scan so the post-flycheck
    // assertions start from a clean slate.
    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::HeadChanged,
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::StatusesChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("target".to_string(), PathChange::Loaded),
            ("target/debug".to_string(), PathChange::Loaded),
            ("target/debug/deps".to_string(), PathChange::Loaded),
            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
        ],
        "All non-ignored entries and all opened firs should be getting a project event",
    );

    // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
    // This may happen multiple times during a single flycheck, but once is enough for testing.
    fs.emit_fs_event("/root/target/debug/deps", None);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The event targeted an ignored directory, so neither the git store nor
    // the project should have reacted.
    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        Vec::new(),
        "No further project events should happen, as only ignored dirs received FS events",
    );
}
11448
/// Verifies that adding an invisible (non-user-facing) worktree — here a
/// single-file worktree for `b.txt` — does not cause repositories from outside
/// the visible worktree to be registered with the project.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` is a repo containing a nested repo `dep1`; only `dep1` is opened
    // as the visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo rooted at the visible worktree should be known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file that lives inside the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (`/root/dir1`) must still not appear in the project.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11510
/// Tests that git status and `is_ignored` state stay correct through rescans:
/// ancestor-level gitignore rules apply, newly staged files show as Added, and
/// files created inside ignored directories stay ignored with no git status.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so even normally hidden entries (like
    // `.git`) get worktree entries in this test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The `.gitignore` at `/root` is *above* the repo root at `/root/tree`,
    // exercising ancestor-ignore handling.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's contents to be scanned so its entries exist.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is unmodified (no status), ancestor-ignored
    // and repo-ignored files have no git status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new files in ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file shows as Added; the ignored ones still have
        // no status.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11651
/// Tests discovery and refresh of linked git worktrees (`.git` *file* pointing
/// into `<repo>/.git/worktrees/...`) and submodules (`.git` file pointing into
/// `<repo>/.git/modules/...`), including status updates after git state changes.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repos — main, linked worktree, and submodule — should be found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // The linked worktree's work dir is its checkout, but its original
        // repo path points back to the main repository.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repo has processed all pending git work.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // A submodule is its own repository: work dir and original repo path
        // coincide, and it is not a linked worktree.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11823
/// Two worktrees rooted inside the same git repository must resolve to a
/// single deduplicated repository entry in the project.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository — the common parent — should be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11870
/// When a buffer is saved under a new path (save-as), its unstaged and
/// uncommitted diffs must be rebased against the *new* path's index and HEAD
/// contents via the `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each file at each git stage, so assertions can
    // tell exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Make the buffer differ from any git state so diffs are non-trivial.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // The uncommitted diff must use file_2's HEAD content as its base.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11984
11985async fn search(
11986 project: &Entity<Project>,
11987 query: SearchQuery,
11988 cx: &mut gpui::TestAppContext,
11989) -> Result<HashMap<String, Vec<Range<usize>>>> {
11990 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11991 let mut results = HashMap::default();
11992 while let Ok(search_result) = search_rx.rx.recv().await {
11993 match search_result {
11994 SearchResult::Buffer { buffer, ranges } => {
11995 results.entry(buffer).or_insert(ranges);
11996 }
11997 SearchResult::LimitReached => {}
11998 }
11999 }
12000 Ok(results
12001 .into_iter()
12002 .map(|(buffer, ranges)| {
12003 buffer.update(cx, |buffer, cx| {
12004 let path = buffer
12005 .file()
12006 .unwrap()
12007 .full_path(cx)
12008 .to_string_lossy()
12009 .to_string();
12010 let ranges = ranges
12011 .into_iter()
12012 .map(|range| range.to_offset(buffer))
12013 .collect::<Vec<_>>();
12014 (path, ranges)
12015 })
12016 })
12017 .collect())
12018}
12019
/// Tests that reloading a buffer with a different text encoding is an
/// undoable/redoable operation and never leaves the buffer dirty (the content
/// always matches what is on disk under the chosen encoding).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Initial state: detected as UTF-8, text "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 as one UTF-16LE code unit is U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the encoding and the original text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
12083
/// Verifies that `wait_for_initial_scan` resolves only after all worktrees are
/// scanned, and that exactly one `Repository` entity is created per `.git`
/// directory during that initial scan.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Track every `Repository` entity constructed, so we can count creations
    // (not just the final set) after the scan.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    // One repository per worktree (`a` and `b`), created exactly once each.
    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
12154
12155pub fn init_test(cx: &mut gpui::TestAppContext) {
12156 zlog::init_test();
12157
12158 cx.update(|cx| {
12159 let settings_store = SettingsStore::test(cx);
12160 cx.set_global(settings_store);
12161 release_channel::init(semver::Version::new(0, 0, 0), cx);
12162 });
12163}
12164
12165fn json_lang() -> Arc<Language> {
12166 Arc::new(Language::new(
12167 LanguageConfig {
12168 name: "JSON".into(),
12169 matcher: LanguageMatcher {
12170 path_suffixes: vec!["json".to_string()],
12171 ..Default::default()
12172 },
12173 ..Default::default()
12174 },
12175 None,
12176 ))
12177}
12178
12179fn js_lang() -> Arc<Language> {
12180 Arc::new(Language::new(
12181 LanguageConfig {
12182 name: "JavaScript".into(),
12183 matcher: LanguageMatcher {
12184 path_suffixes: vec!["js".to_string()],
12185 ..Default::default()
12186 },
12187 ..Default::default()
12188 },
12189 None,
12190 ))
12191}
12192
/// Builds a test Python language whose toolchain lister reports a `.venv`
/// toolchain for every ancestor directory (relative to the worktree root)
/// that contains a `.venv` directory on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister backed by the fake FS; "Moot" because it performs no
    // real toolchain resolution.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in tests; callers should not rely on it.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12265
12266fn typescript_lang() -> Arc<Language> {
12267 Arc::new(Language::new(
12268 LanguageConfig {
12269 name: "TypeScript".into(),
12270 matcher: LanguageMatcher {
12271 path_suffixes: vec!["ts".to_string()],
12272 ..Default::default()
12273 },
12274 ..Default::default()
12275 },
12276 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12277 ))
12278}
12279
12280fn tsx_lang() -> Arc<Language> {
12281 Arc::new(Language::new(
12282 LanguageConfig {
12283 name: "tsx".into(),
12284 matcher: LanguageMatcher {
12285 path_suffixes: vec!["tsx".to_string()],
12286 ..Default::default()
12287 },
12288 ..Default::default()
12289 },
12290 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12291 ))
12292}
12293
/// Resolves all tasks for the given contexts from the project's task
/// inventory, returning previously used tasks followed by the currently
/// available ones in a single list.
fn get_all_tasks(
    project: &Entity<Project>,
    task_contexts: Arc<TaskContexts>,
    cx: &mut App,
) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
    let new_tasks = project.update(cx, |project, cx| {
        project.task_store().update(cx, |task_store, cx| {
            task_store.task_inventory().unwrap().update(cx, |this, cx| {
                this.used_and_current_resolved_tasks(task_contexts, cx)
            })
        })
    });

    // Concatenate off the main thread: used tasks first, then current ones.
    cx.background_spawn(async move {
        let (mut old, new) = new_tasks.await;
        old.extend(new);
        old
    })
}
12313
12314#[track_caller]
12315fn assert_entry_git_state(
12316 tree: &Worktree,
12317 repository: &Repository,
12318 path: &str,
12319 index_status: Option<StatusCode>,
12320 is_ignored: bool,
12321) {
12322 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12323 let entry = tree
12324 .entry_for_path(&rel_path(path))
12325 .unwrap_or_else(|| panic!("entry {path} not found"));
12326 let status = repository
12327 .status_for_path(&repo_path(path))
12328 .map(|entry| entry.status);
12329 let expected = index_status.map(|index_status| {
12330 TrackedStatus {
12331 index_status,
12332 worktree_status: StatusCode::Unmodified,
12333 }
12334 .into()
12335 });
12336 assert_eq!(
12337 status, expected,
12338 "expected {path} to have git status: {expected:?}"
12339 );
12340 assert_eq!(
12341 entry.is_ignored, is_ignored,
12342 "expected {path} to have is_ignored: {is_ignored}"
12343 );
12344}
12345
12346#[track_caller]
12347fn git_init(path: &Path) -> git2::Repository {
12348 let mut init_opts = RepositoryInitOptions::new();
12349 init_opts.initial_head("main");
12350 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12351}
12352
12353#[track_caller]
12354fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12355 let path = path.as_ref();
12356 let mut index = repo.index().expect("Failed to get index");
12357 index.add_path(path).expect("Failed to add file");
12358 index.write().expect("Failed to write index");
12359}
12360
12361#[track_caller]
12362fn git_remove_index(path: &Path, repo: &git2::Repository) {
12363 let mut index = repo.index().expect("Failed to get index");
12364 index.remove_path(path).expect("Failed to add file");
12365 index.write().expect("Failed to write index");
12366}
12367
/// Commits the current index state to HEAD with a fixed test signature,
/// handling both the initial commit (no parent) and subsequent commits
/// (single parent taken from the current HEAD).
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    // Write the in-memory index out as a tree object to commit.
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        // `head()` fails when the repo has no commits yet — create the
        // initial, parentless commit.
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}
12394
// Currently unused (`cfg(any())` compiles it out); kept for future tests.
/// Cherry-picks `commit` onto the current HEAD with default options.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12400
12401#[track_caller]
12402fn git_stash(repo: &mut git2::Repository) {
12403 use git2::Signature;
12404
12405 let signature = Signature::now("test", "test@zed.dev").unwrap();
12406 repo.stash_save(&signature, "N/A", None)
12407 .expect("Failed to stash");
12408}
12409
12410#[track_caller]
12411fn git_reset(offset: usize, repo: &git2::Repository) {
12412 let head = repo.head().expect("Couldn't get repo head");
12413 let object = head.peel(git2::ObjectType::Commit).unwrap();
12414 let commit = object.as_commit().unwrap();
12415 let new_head = commit
12416 .parents()
12417 .inspect(|parnet| {
12418 parnet.message();
12419 })
12420 .nth(offset)
12421 .expect("Not enough history");
12422 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12423 .expect("Could not reset");
12424}
12425
// Currently unused (`cfg(any())` compiles it out); kept for future tests.
/// Creates branch `name` pointing at the current HEAD commit, without
/// switching to it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed expect message: this creates a branch (the previous
    // "Failed to commit" message was copy-pasted from `git_commit`).
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12436
// Currently unused (`cfg(any())` compiles it out); kept for future tests.
/// Points HEAD at the ref `name` (e.g. "refs/heads/main") and checks out its
/// tree into the working directory.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12443
// Compiled out via the always-false `#[cfg(any())]`; kept for local debugging.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    // Snapshot every status entry's (path, status) pair into a map.
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
12453
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories; each is opened as its own worktree below.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id so the assertions below
    // can verify which worktree a resolved path landed in.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file directly under a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested files resolve with their full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Paths are matched against the correct worktree, not just the first.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is path-based: a file need not exist on disk as long as
        // it falls inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12537
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories (/root/a and /root/b). /root/b/script is additionally
    // opened as its own worktree even though it lives inside repo b.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index the worktree ids by absolute path so we can remove them by path.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the subfolder worktree must not drop repo b: it is still
    // reachable through the /root/b worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree holding the active repository should make the
    // project fall back to the remaining repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every worktree gone, there is no active repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12650
// Verifies the optimistic-staging lifecycle of a diff hunk: unstaged →
// pending-removal (optimistic, before the git write lands) → fully staged →
// gone after the change is committed.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index contain "two"; the working copy has "TWO", producing a
    // single modified hunk in the uncommitted diff.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // The hunk may still be in its starting state for a few ticks; stop
        // as soon as the optimistic pending state appears. Reaching
        // NoSecondaryHunk here would mean staging completed without ever
        // showing the optimistic state.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12795
12796#[gpui::test]
12797async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12798 init_test(cx);
12799
12800 // Configure read_only_files setting
12801 cx.update(|cx| {
12802 cx.update_global::<SettingsStore, _>(|store, cx| {
12803 store.update_user_settings(cx, |settings| {
12804 settings.project.worktree.read_only_files = Some(vec![
12805 "**/generated/**".to_string(),
12806 "**/*.gen.rs".to_string(),
12807 ]);
12808 });
12809 });
12810 });
12811
12812 let fs = FakeFs::new(cx.background_executor.clone());
12813 fs.insert_tree(
12814 path!("/root"),
12815 json!({
12816 "src": {
12817 "main.rs": "fn main() {}",
12818 "types.gen.rs": "// Generated file",
12819 },
12820 "generated": {
12821 "schema.rs": "// Auto-generated schema",
12822 }
12823 }),
12824 )
12825 .await;
12826
12827 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12828
12829 // Open a regular file - should be read-write
12830 let regular_buffer = project
12831 .update(cx, |project, cx| {
12832 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12833 })
12834 .await
12835 .unwrap();
12836
12837 regular_buffer.read_with(cx, |buffer, _| {
12838 assert!(!buffer.read_only(), "Regular file should not be read-only");
12839 });
12840
12841 // Open a file matching *.gen.rs pattern - should be read-only
12842 let gen_buffer = project
12843 .update(cx, |project, cx| {
12844 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12845 })
12846 .await
12847 .unwrap();
12848
12849 gen_buffer.read_with(cx, |buffer, _| {
12850 assert!(
12851 buffer.read_only(),
12852 "File matching *.gen.rs pattern should be read-only"
12853 );
12854 });
12855
12856 // Open a file in generated directory - should be read-only
12857 let generated_buffer = project
12858 .update(cx, |project, cx| {
12859 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12860 })
12861 .await
12862 .unwrap();
12863
12864 generated_buffer.read_with(cx, |buffer, _| {
12865 assert!(
12866 buffer.read_only(),
12867 "File in generated directory should be read-only"
12868 );
12869 });
12870}
12871
12872#[gpui::test]
12873async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12874 init_test(cx);
12875
12876 // Explicitly set read_only_files to empty (default behavior)
12877 cx.update(|cx| {
12878 cx.update_global::<SettingsStore, _>(|store, cx| {
12879 store.update_user_settings(cx, |settings| {
12880 settings.project.worktree.read_only_files = Some(vec![]);
12881 });
12882 });
12883 });
12884
12885 let fs = FakeFs::new(cx.background_executor.clone());
12886 fs.insert_tree(
12887 path!("/root"),
12888 json!({
12889 "src": {
12890 "main.rs": "fn main() {}",
12891 },
12892 "generated": {
12893 "schema.rs": "// Auto-generated schema",
12894 }
12895 }),
12896 )
12897 .await;
12898
12899 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12900
12901 // All files should be read-write when read_only_files is empty
12902 let main_buffer = project
12903 .update(cx, |project, cx| {
12904 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12905 })
12906 .await
12907 .unwrap();
12908
12909 main_buffer.read_with(cx, |buffer, _| {
12910 assert!(
12911 !buffer.read_only(),
12912 "Files should not be read-only when read_only_files is empty"
12913 );
12914 });
12915
12916 let generated_buffer = project
12917 .update(cx, |project, cx| {
12918 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12919 })
12920 .await
12921 .unwrap();
12922
12923 generated_buffer.read_with(cx, |buffer, _| {
12924 assert!(
12925 !buffer.read_only(),
12926 "Generated files should not be read-only when read_only_files is empty"
12927 );
12928 });
12929}
12930
12931#[gpui::test]
12932async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12933 init_test(cx);
12934
12935 // Configure to make lock files read-only
12936 cx.update(|cx| {
12937 cx.update_global::<SettingsStore, _>(|store, cx| {
12938 store.update_user_settings(cx, |settings| {
12939 settings.project.worktree.read_only_files = Some(vec![
12940 "**/*.lock".to_string(),
12941 "**/package-lock.json".to_string(),
12942 ]);
12943 });
12944 });
12945 });
12946
12947 let fs = FakeFs::new(cx.background_executor.clone());
12948 fs.insert_tree(
12949 path!("/root"),
12950 json!({
12951 "Cargo.lock": "# Lock file",
12952 "Cargo.toml": "[package]",
12953 "package-lock.json": "{}",
12954 "package.json": "{}",
12955 }),
12956 )
12957 .await;
12958
12959 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12960
12961 // Cargo.lock should be read-only
12962 let cargo_lock = project
12963 .update(cx, |project, cx| {
12964 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12965 })
12966 .await
12967 .unwrap();
12968
12969 cargo_lock.read_with(cx, |buffer, _| {
12970 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12971 });
12972
12973 // Cargo.toml should be read-write
12974 let cargo_toml = project
12975 .update(cx, |project, cx| {
12976 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12977 })
12978 .await
12979 .unwrap();
12980
12981 cargo_toml.read_with(cx, |buffer, _| {
12982 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12983 });
12984
12985 // package-lock.json should be read-only
12986 let package_lock = project
12987 .update(cx, |project, cx| {
12988 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12989 })
12990 .await
12991 .unwrap();
12992
12993 package_lock.read_with(cx, |buffer, _| {
12994 assert!(buffer.read_only(), "package-lock.json should be read-only");
12995 });
12996
12997 // package.json should be read-write
12998 let package_json = project
12999 .update(cx, |project, cx| {
13000 project.open_local_buffer(path!("/root/package.json"), cx)
13001 })
13002 .await
13003 .unwrap();
13004
13005 package_json.read_with(cx, |buffer, _| {
13006 assert!(!buffer.read_only(), "package.json should not be read-only");
13007 });
13008}
13009
13010mod disable_ai_settings_tests {
13011 use gpui::TestAppContext;
13012 use project::*;
13013 use settings::{Settings, SettingsStore};
13014
13015 #[gpui::test]
13016 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
13017 cx.update(|cx| {
13018 settings::init(cx);
13019
13020 // Test 1: Default is false (AI enabled)
13021 assert!(
13022 !DisableAiSettings::get_global(cx).disable_ai,
13023 "Default should allow AI"
13024 );
13025 });
13026
13027 let disable_true = serde_json::json!({
13028 "disable_ai": true
13029 })
13030 .to_string();
13031 let disable_false = serde_json::json!({
13032 "disable_ai": false
13033 })
13034 .to_string();
13035
13036 cx.update_global::<SettingsStore, _>(|store, cx| {
13037 store.set_user_settings(&disable_false, cx).unwrap();
13038 store.set_global_settings(&disable_true, cx).unwrap();
13039 });
13040 cx.update(|cx| {
13041 assert!(
13042 DisableAiSettings::get_global(cx).disable_ai,
13043 "Local false cannot override global true"
13044 );
13045 });
13046
13047 cx.update_global::<SettingsStore, _>(|store, cx| {
13048 store.set_global_settings(&disable_false, cx).unwrap();
13049 store.set_user_settings(&disable_true, cx).unwrap();
13050 });
13051
13052 cx.update(|cx| {
13053 assert!(
13054 DisableAiSettings::get_global(cx).disable_ai,
13055 "Local false cannot override global true"
13056 );
13057 });
13058 }
13059
13060 #[gpui::test]
13061 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
13062 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
13063 use worktree::WorktreeId;
13064
13065 cx.update(|cx| {
13066 settings::init(cx);
13067
13068 // Default should allow AI
13069 assert!(
13070 !DisableAiSettings::get_global(cx).disable_ai,
13071 "Default should allow AI"
13072 );
13073 });
13074
13075 let worktree_id = WorktreeId::from_usize(1);
13076 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
13077 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
13078 };
13079 let project_path = rel_path("project");
13080 let settings_location = SettingsLocation {
13081 worktree_id,
13082 path: project_path.as_ref(),
13083 };
13084
13085 // Test: Project-level disable_ai=true should disable AI for files in that project
13086 cx.update_global::<SettingsStore, _>(|store, cx| {
13087 store
13088 .set_local_settings(
13089 worktree_id,
13090 LocalSettingsPath::InWorktree(project_path.clone()),
13091 LocalSettingsKind::Settings,
13092 Some(r#"{ "disable_ai": true }"#),
13093 cx,
13094 )
13095 .unwrap();
13096 });
13097
13098 cx.update(|cx| {
13099 let settings = DisableAiSettings::get(Some(settings_location), cx);
13100 assert!(
13101 settings.disable_ai,
13102 "Project-level disable_ai=true should disable AI for files in that project"
13103 );
13104 // Global should now also be true since project-level disable_ai is merged into global
13105 assert!(
13106 DisableAiSettings::get_global(cx).disable_ai,
13107 "Global setting should be affected by project-level disable_ai=true"
13108 );
13109 });
13110
13111 // Test: Setting project-level to false should allow AI for that project
13112 cx.update_global::<SettingsStore, _>(|store, cx| {
13113 store
13114 .set_local_settings(
13115 worktree_id,
13116 LocalSettingsPath::InWorktree(project_path.clone()),
13117 LocalSettingsKind::Settings,
13118 Some(r#"{ "disable_ai": false }"#),
13119 cx,
13120 )
13121 .unwrap();
13122 });
13123
13124 cx.update(|cx| {
13125 let settings = DisableAiSettings::get(Some(settings_location), cx);
13126 assert!(
13127 !settings.disable_ai,
13128 "Project-level disable_ai=false should allow AI"
13129 );
13130 // Global should also be false now
13131 assert!(
13132 !DisableAiSettings::get_global(cx).disable_ai,
13133 "Global setting should be false when project-level is false"
13134 );
13135 });
13136
13137 // Test: User-level true + project-level false = AI disabled (saturation)
13138 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
13139 cx.update_global::<SettingsStore, _>(|store, cx| {
13140 store.set_user_settings(&disable_true, cx).unwrap();
13141 store
13142 .set_local_settings(
13143 worktree_id,
13144 LocalSettingsPath::InWorktree(project_path.clone()),
13145 LocalSettingsKind::Settings,
13146 Some(r#"{ "disable_ai": false }"#),
13147 cx,
13148 )
13149 .unwrap();
13150 });
13151
13152 cx.update(|cx| {
13153 let settings = DisableAiSettings::get(Some(settings_location), cx);
13154 assert!(
13155 settings.disable_ai,
13156 "Project-level false cannot override user-level true (SaturatingBool)"
13157 );
13158 });
13159 }
13160}