1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root scans
    // correctly, and that entries reachable both directly and through a
    // directory symlink resolve to the same underlying inode.
    init_test(cx);
    // This test uses the real filesystem (TempTree + RealFs), so the
    // executor must be allowed to park on real I/O.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` -> `root`, and `root/finnochio` -> `root/fennel`, so
    // `fennel/grape` and `finnochio/grape` name the same file on disk.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, and grape seen via both fennel/ and finnochio/.
        assert_eq!(tree.file_count(), 5);
        // The file reached through the symlinked directory must share the
        // inode of the file reached through the real directory.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
179
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // End-to-end coverage of .editorconfig support inside a worktree:
    // precedence over .zed/settings.json, nested .editorconfig overrides,
    // the tab_width fallback when indent_size is absent,
    // `max_line_length = off` handling, and glob scoping ([*.rs] / [*.js]).
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the on-disk temp tree into the fake FS the project watches.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it keeps tab_size 8 from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies that .editorconfig files located *above* the worktree root
    // ("external" configs) are discovered and combined with configs inside
    // the worktree, with each file picking up the nearest matching section.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // The worktree root is two levels below the outermost .editorconfig.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    // The worktree's own .editorconfig declares `root = true`, so the
    // external config in the parent directory must never be consulted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
445
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    // An external .editorconfig with `root = true` in the parent directory
    // must stop traversal there: the grandparent config is never applied,
    // even though the worktree itself has no .editorconfig at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
493
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees share one external .editorconfig in their common
    // parent; both must pick up its indent_size even though each worktree
    // also has its own (non-conflicting) internal config.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
555
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    // External .editorconfig discovery is only triggered by the presence of
    // an .editorconfig *inside* the worktree. With none inside, the parent's
    // config must be ignored and defaults apply.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
603
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Editing an external .editorconfig on disk must be observed (watched)
    // and cause the resolved language settings to refresh.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                // Empty internal config — its presence enables external lookup.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick it up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // A worktree added to an existing project must also discover external
    // .editorconfig files above its root, just like the initial worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second directory as a new worktree after project creation.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Removing the only worktree that references an external .editorconfig
    // must drop the per-worktree state, the cached external config, and the
    // filesystem watcher for it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
810
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // When two worktrees share one external .editorconfig, removing one
    // worktree must NOT evict the shared config or its watcher while the
    // other worktree still uses it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // Capture ids for both worktrees, keeping a handle to worktree_b for
    // the post-removal settings check.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that the `git_hosting_providers` project setting registers a
    // custom provider in the global registry, and that removing the setting
    // unregisters it again.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, the custom provider "foo" must be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The provider must be gone once the setting is removed.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    // Verifies that writing a `.zed/tasks.json` that references an unknown task
    // variable emits an `Event::Toast` whose link points at the tasks documentation.
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast must identify the offending variable and link to the docs.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A task template that references `$ZED_WORKTREE_ROOT` must only resolve
    // when a worktree context providing that variable is available; with only
    // an active-item context (no worktree context) it must produce no tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // No worktree context at all: the variable cannot be substituted, so the
    // task must fail to resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context that supplies WorktreeRoot, the same template
    // resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1324
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects in one worktree, each rooted at its own
    // `pyproject.toml`, initially share a single language-server instance;
    // activating a distinct toolchain for one subproject must spawn a second,
    // separate server instance for it.

    // Minimal manifest provider rooting servers at the nearest ancestor
    // directory that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors, returning the first directory
            // that contains the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b must reuse the existing server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After toolchain activation, project-b's buffer should be served by a
    // fresh server instance, distinct from project-a's.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer opens, buffers are configured from server
    // capabilities, edits/saves/renames are routed only to the servers for the
    // buffer's language, renames across languages re-home the buffer (and drop
    // its diagnostics), restarts reopen documents, and dropping the last
    // handle closes the document.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // we can tell below which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers were configured for it.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server's next change notification is for the rust buffer only —
    // the TOML edit above never reaches it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is reported as close-then-open on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the renamed buffer so we can assert below that it
    // is cleared when the file's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request as part of the restart.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
1930#[gpui::test]
1931async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1932 init_test(cx);
1933
1934 let settings_json_contents = json!({
1935 "languages": {
1936 "Rust": {
1937 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1938 }
1939 },
1940 "lsp": {
1941 "my_fake_lsp": {
1942 "binary": {
1943 // file exists, so this is treated as a relative path
1944 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1945 }
1946 },
1947 "lsp_on_path": {
1948 "binary": {
1949 // file doesn't exist, so it will fall back on PATH env var
1950 "path": path!("lsp_on_path.exe").to_string(),
1951 }
1952 }
1953 },
1954 });
1955
1956 let fs = FakeFs::new(cx.executor());
1957 fs.insert_tree(
1958 path!("/the-root"),
1959 json!({
1960 ".zed": {
1961 "settings.json": settings_json_contents.to_string(),
1962 },
1963 ".relative_path": {
1964 "to": {
1965 "my_fake_lsp.exe": "",
1966 },
1967 },
1968 "src": {
1969 "main.rs": "",
1970 }
1971 }),
1972 )
1973 .await;
1974
1975 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1976 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1977 language_registry.add(rust_lang());
1978
1979 let mut my_fake_lsp = language_registry.register_fake_lsp(
1980 "Rust",
1981 FakeLspAdapter {
1982 name: "my_fake_lsp",
1983 ..Default::default()
1984 },
1985 );
1986 let mut lsp_on_path = language_registry.register_fake_lsp(
1987 "Rust",
1988 FakeLspAdapter {
1989 name: "lsp_on_path",
1990 ..Default::default()
1991 },
1992 );
1993
1994 cx.run_until_parked();
1995
1996 // Start the language server by opening a buffer with a compatible file extension.
1997 project
1998 .update(cx, |project, cx| {
1999 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
2000 })
2001 .await
2002 .unwrap();
2003
2004 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
2005 assert_eq!(
2006 lsp_path.to_string_lossy(),
2007 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
2008 );
2009
2010 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2011 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2012}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(
2206 lsp::RegistrationParams {
2207 registrations: vec![lsp::Registration {
2208 id: Default::default(),
2209 method: "workspace/didChangeWatchedFiles".to_string(),
2210 register_options: serde_json::to_value(
2211 lsp::DidChangeWatchedFilesRegistrationOptions {
2212 watchers: vec![
2213 lsp::FileSystemWatcher {
2214 glob_pattern: lsp::GlobPattern::String(
2215 path!("/the-root/Cargo.toml").to_string(),
2216 ),
2217 kind: None,
2218 },
2219 lsp::FileSystemWatcher {
2220 glob_pattern: lsp::GlobPattern::String(
2221 path!("/the-root/src/*.{rs,c}").to_string(),
2222 ),
2223 kind: None,
2224 },
2225 lsp::FileSystemWatcher {
2226 glob_pattern: lsp::GlobPattern::String(
2227 path!("/the-root/target/y/**/*.rs").to_string(),
2228 ),
2229 kind: None,
2230 },
2231 lsp::FileSystemWatcher {
2232 glob_pattern: lsp::GlobPattern::String(
2233 path!("/the/stdlib/src/**/*.rs").to_string(),
2234 ),
2235 kind: None,
2236 },
2237 lsp::FileSystemWatcher {
2238 glob_pattern: lsp::GlobPattern::String(
2239 path!("**/Cargo.lock").to_string(),
2240 ),
2241 kind: None,
2242 },
2243 ],
2244 },
2245 )
2246 .ok(),
2247 }],
2248 },
2249 DEFAULT_LSP_REQUEST_TIMEOUT,
2250 )
2251 .await
2252 .into_response()
2253 .unwrap();
2254 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2255 let file_changes = file_changes.clone();
2256 move |params, _| {
2257 let mut file_changes = file_changes.lock();
2258 file_changes.extend(params.changes);
2259 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2260 }
2261 });
2262
2263 cx.executor().run_until_parked();
2264 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2265 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2266
2267 let mut new_watched_paths = fs.watched_paths();
2268 new_watched_paths.retain(|path| {
2269 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2270 });
2271 assert_eq!(
2272 &new_watched_paths,
2273 &[
2274 Path::new(path!("/the-root")),
2275 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2276 Path::new(path!("/the/stdlib/src"))
2277 ]
2278 );
2279
2280 // Now the language server has asked us to watch an ignored directory path,
2281 // so we recursively load it.
2282 project.update(cx, |project, cx| {
2283 let worktree = project.visible_worktrees(cx).next().unwrap();
2284 assert_eq!(
2285 worktree
2286 .read(cx)
2287 .snapshot()
2288 .entries(true, 0)
2289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2290 .collect::<Vec<_>>(),
2291 &[
2292 ("", false),
2293 (".gitignore", false),
2294 ("Cargo.lock", false),
2295 ("src", false),
2296 ("src/a.rs", false),
2297 ("src/b.rs", false),
2298 ("target", true),
2299 ("target/x", true),
2300 ("target/y", true),
2301 ("target/y/out", true),
2302 ("target/y/out/y.rs", true),
2303 ("target/z", true),
2304 ]
2305 );
2306 });
2307
2308 // Perform some file system mutations, two of which match the watched patterns,
2309 // and one of which does not.
2310 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2317 .await
2318 .unwrap();
2319 fs.create_file(
2320 path!("/the-root/target/x/out/x2.rs").as_ref(),
2321 Default::default(),
2322 )
2323 .await
2324 .unwrap();
2325 fs.create_file(
2326 path!("/the-root/target/y/out/y2.rs").as_ref(),
2327 Default::default(),
2328 )
2329 .await
2330 .unwrap();
2331 fs.save(
2332 path!("/the-root/Cargo.lock").as_ref(),
2333 &"".into(),
2334 Default::default(),
2335 )
2336 .await
2337 .unwrap();
2338 fs.save(
2339 path!("/the-stdlib/LICENSE").as_ref(),
2340 &"".into(),
2341 Default::default(),
2342 )
2343 .await
2344 .unwrap();
2345 fs.save(
2346 path!("/the/stdlib/src/string.rs").as_ref(),
2347 &"".into(),
2348 Default::default(),
2349 )
2350 .await
2351 .unwrap();
2352
2353 // The language server receives events for the FS mutations that match its watch patterns.
2354 cx.executor().run_until_parked();
2355 assert_eq!(
2356 &*file_changes.lock(),
2357 &[
2358 lsp::FileEvent {
2359 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2360 typ: lsp::FileChangeType::CHANGED,
2361 },
2362 lsp::FileEvent {
2363 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2364 typ: lsp::FileChangeType::DELETED,
2365 },
2366 lsp::FileEvent {
2367 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2368 typ: lsp::FileChangeType::CREATED,
2369 },
2370 lsp::FileEvent {
2371 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2372 typ: lsp::FileChangeType::CREATED,
2373 },
2374 lsp::FileEvent {
2375 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2376 typ: lsp::FileChangeType::CHANGED,
2377 },
2378 ]
2379 );
2380}
2381
2382#[gpui::test]
2383async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let fs = FakeFs::new(cx.executor());
2387 fs.insert_tree(
2388 path!("/dir"),
2389 json!({
2390 "a.rs": "let a = 1;",
2391 "b.rs": "let b = 2;"
2392 }),
2393 )
2394 .await;
2395
2396 let project = Project::test(
2397 fs,
2398 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2399 cx,
2400 )
2401 .await;
2402 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2403
2404 let buffer_a = project
2405 .update(cx, |project, cx| {
2406 project.open_local_buffer(path!("/dir/a.rs"), cx)
2407 })
2408 .await
2409 .unwrap();
2410 let buffer_b = project
2411 .update(cx, |project, cx| {
2412 project.open_local_buffer(path!("/dir/b.rs"), cx)
2413 })
2414 .await
2415 .unwrap();
2416
2417 lsp_store.update(cx, |lsp_store, cx| {
2418 lsp_store
2419 .update_diagnostics(
2420 LanguageServerId(0),
2421 lsp::PublishDiagnosticsParams {
2422 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2423 version: None,
2424 diagnostics: vec![lsp::Diagnostic {
2425 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2426 severity: Some(lsp::DiagnosticSeverity::ERROR),
2427 message: "error 1".to_string(),
2428 ..Default::default()
2429 }],
2430 },
2431 None,
2432 DiagnosticSourceKind::Pushed,
2433 &[],
2434 cx,
2435 )
2436 .unwrap();
2437 lsp_store
2438 .update_diagnostics(
2439 LanguageServerId(0),
2440 lsp::PublishDiagnosticsParams {
2441 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2442 version: None,
2443 diagnostics: vec![lsp::Diagnostic {
2444 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2445 severity: Some(DiagnosticSeverity::WARNING),
2446 message: "error 2".to_string(),
2447 ..Default::default()
2448 }],
2449 },
2450 None,
2451 DiagnosticSourceKind::Pushed,
2452 &[],
2453 cx,
2454 )
2455 .unwrap();
2456 });
2457
2458 buffer_a.update(cx, |buffer, _| {
2459 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2460 assert_eq!(
2461 chunks
2462 .iter()
2463 .map(|(s, d)| (s.as_str(), *d))
2464 .collect::<Vec<_>>(),
2465 &[
2466 ("let ", None),
2467 ("a", Some(DiagnosticSeverity::ERROR)),
2468 (" = 1;", None),
2469 ]
2470 );
2471 });
2472 buffer_b.update(cx, |buffer, _| {
2473 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2474 assert_eq!(
2475 chunks
2476 .iter()
2477 .map(|(s, d)| (s.as_str(), *d))
2478 .collect::<Vec<_>>(),
2479 &[
2480 ("let ", None),
2481 ("b", Some(DiagnosticSeverity::WARNING)),
2482 (" = 2;", None),
2483 ]
2484 );
2485 });
2486}
2487
2488#[gpui::test]
2489async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2490 init_test(cx);
2491
2492 let fs = FakeFs::new(cx.executor());
2493 fs.insert_tree(
2494 path!("/root"),
2495 json!({
2496 "dir": {
2497 ".git": {
2498 "HEAD": "ref: refs/heads/main",
2499 },
2500 ".gitignore": "b.rs",
2501 "a.rs": "let a = 1;",
2502 "b.rs": "let b = 2;",
2503 },
2504 "other.rs": "let b = c;"
2505 }),
2506 )
2507 .await;
2508
2509 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2510 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2511 let (worktree, _) = project
2512 .update(cx, |project, cx| {
2513 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2514 })
2515 .await
2516 .unwrap();
2517 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2518
2519 let (worktree, _) = project
2520 .update(cx, |project, cx| {
2521 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2522 })
2523 .await
2524 .unwrap();
2525 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2526
2527 let server_id = LanguageServerId(0);
2528 lsp_store.update(cx, |lsp_store, cx| {
2529 lsp_store
2530 .update_diagnostics(
2531 server_id,
2532 lsp::PublishDiagnosticsParams {
2533 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2534 version: None,
2535 diagnostics: vec![lsp::Diagnostic {
2536 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2537 severity: Some(lsp::DiagnosticSeverity::ERROR),
2538 message: "unused variable 'b'".to_string(),
2539 ..Default::default()
2540 }],
2541 },
2542 None,
2543 DiagnosticSourceKind::Pushed,
2544 &[],
2545 cx,
2546 )
2547 .unwrap();
2548 lsp_store
2549 .update_diagnostics(
2550 server_id,
2551 lsp::PublishDiagnosticsParams {
2552 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2553 version: None,
2554 diagnostics: vec![lsp::Diagnostic {
2555 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2556 severity: Some(lsp::DiagnosticSeverity::ERROR),
2557 message: "unknown variable 'c'".to_string(),
2558 ..Default::default()
2559 }],
2560 },
2561 None,
2562 DiagnosticSourceKind::Pushed,
2563 &[],
2564 cx,
2565 )
2566 .unwrap();
2567 });
2568
2569 let main_ignored_buffer = project
2570 .update(cx, |project, cx| {
2571 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2572 })
2573 .await
2574 .unwrap();
2575 main_ignored_buffer.update(cx, |buffer, _| {
2576 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2577 assert_eq!(
2578 chunks
2579 .iter()
2580 .map(|(s, d)| (s.as_str(), *d))
2581 .collect::<Vec<_>>(),
2582 &[
2583 ("let ", None),
2584 ("b", Some(DiagnosticSeverity::ERROR)),
2585 (" = 2;", None),
2586 ],
2587 "Gigitnored buffers should still get in-buffer diagnostics",
2588 );
2589 });
2590 let other_buffer = project
2591 .update(cx, |project, cx| {
2592 project.open_buffer((other_worktree_id, rel_path("")), cx)
2593 })
2594 .await
2595 .unwrap();
2596 other_buffer.update(cx, |buffer, _| {
2597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2598 assert_eq!(
2599 chunks
2600 .iter()
2601 .map(|(s, d)| (s.as_str(), *d))
2602 .collect::<Vec<_>>(),
2603 &[
2604 ("let b = ", None),
2605 ("c", Some(DiagnosticSeverity::ERROR)),
2606 (";", None),
2607 ],
2608 "Buffers from hidden projects should still get in-buffer diagnostics"
2609 );
2610 });
2611
2612 project.update(cx, |project, cx| {
2613 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2614 assert_eq!(
2615 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2616 vec![(
2617 ProjectPath {
2618 worktree_id: main_worktree_id,
2619 path: rel_path("b.rs").into(),
2620 },
2621 server_id,
2622 DiagnosticSummary {
2623 error_count: 1,
2624 warning_count: 0,
2625 }
2626 )]
2627 );
2628 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2629 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2630 });
2631}
2632
// Verifies that progress notifications using the adapter's disk-based
// diagnostics token drive DiskBasedDiagnosticsStarted/Finished project
// events, and that re-publishing identical empty diagnostics does not emit
// a redundant DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter declares as its disk-based diagnostics progress token.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token surfaces as a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while the disk-based task runs still emits a
    // per-path DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the token's progress finishes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The previously published diagnostic is attached to the buffer once opened.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // A second identical (empty) publication must produce no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2768
// Verifies that restarting a language server while its disk-based diagnostics
// task is still in flight discards the old server's unfinished progress: the
// replacement server (with a new id) owns the diagnostics lifecycle, and its
// end-progress alone marks diagnostics as done.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, and the replacement is added with id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2870
2871#[gpui::test]
2872async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let fs = FakeFs::new(cx.executor());
2876 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2877
2878 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2879
2880 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2881 language_registry.add(rust_lang());
2882 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2883
2884 let (buffer, _) = project
2885 .update(cx, |project, cx| {
2886 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2887 })
2888 .await
2889 .unwrap();
2890
2891 // Publish diagnostics
2892 let fake_server = fake_servers.next().await.unwrap();
2893 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2894 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2895 version: None,
2896 diagnostics: vec![lsp::Diagnostic {
2897 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2898 severity: Some(lsp::DiagnosticSeverity::ERROR),
2899 message: "the message".to_string(),
2900 ..Default::default()
2901 }],
2902 });
2903
2904 cx.executor().run_until_parked();
2905 buffer.update(cx, |buffer, _| {
2906 assert_eq!(
2907 buffer
2908 .snapshot()
2909 .diagnostics_in_range::<_, usize>(0..1, false)
2910 .map(|entry| entry.diagnostic.message.clone())
2911 .collect::<Vec<_>>(),
2912 ["the message".to_string()]
2913 );
2914 });
2915 project.update(cx, |project, cx| {
2916 assert_eq!(
2917 project.diagnostic_summary(false, cx),
2918 DiagnosticSummary {
2919 error_count: 1,
2920 warning_count: 0,
2921 }
2922 );
2923 });
2924
2925 project.update(cx, |project, cx| {
2926 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2927 });
2928
2929 // The diagnostics are cleared.
2930 cx.executor().run_until_parked();
2931 buffer.update(cx, |buffer, _| {
2932 assert_eq!(
2933 buffer
2934 .snapshot()
2935 .diagnostics_in_range::<_, usize>(0..1, false)
2936 .map(|entry| entry.diagnostic.message.clone())
2937 .collect::<Vec<_>>(),
2938 Vec::<String>::new(),
2939 );
2940 });
2941 project.update(cx, |project, cx| {
2942 assert_eq!(
2943 project.diagnostic_summary(false, cx),
2944 DiagnosticSummary {
2945 error_count: 0,
2946 warning_count: 0,
2947 }
2948 );
2949 });
2950}
2951
2952#[gpui::test]
2953async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2954 init_test(cx);
2955
2956 let fs = FakeFs::new(cx.executor());
2957 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2958
2959 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2960 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2961
2962 language_registry.add(rust_lang());
2963 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2964
2965 let (buffer, _handle) = project
2966 .update(cx, |project, cx| {
2967 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2968 })
2969 .await
2970 .unwrap();
2971
2972 // Before restarting the server, report diagnostics with an unknown buffer version.
2973 let fake_server = fake_servers.next().await.unwrap();
2974 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2975 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2976 version: Some(10000),
2977 diagnostics: Vec::new(),
2978 });
2979 cx.executor().run_until_parked();
2980 project.update(cx, |project, cx| {
2981 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2982 });
2983
2984 let mut fake_server = fake_servers.next().await.unwrap();
2985 let notification = fake_server
2986 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2987 .await
2988 .text_document;
2989 assert_eq!(notification.version, 0);
2990}
2991
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for tokens that were begun as cancellable:
// the non-cancellable "another-token" must be skipped, while the cancellable
// disk-based progress token gets the cancel notification.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First, a token explicitly marked non-cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Then the disk-based diagnostics token, marked cancellable.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3063
// Verifies that toggling `enable_language_server` in per-language settings
// stops and restarts only the affected server: disabling Rust exits the Rust
// server while JavaScript keeps running, and flipping both settings restarts
// Rust (re-opening its buffer) while stopping JavaScript.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so their lifecycles can be observed
    // independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the matching server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3181
3182#[gpui::test(iterations = 3)]
3183async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3184 init_test(cx);
3185
3186 let text = "
3187 fn a() { A }
3188 fn b() { BB }
3189 fn c() { CCC }
3190 "
3191 .unindent();
3192
3193 let fs = FakeFs::new(cx.executor());
3194 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3195
3196 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3197 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3198
3199 language_registry.add(rust_lang());
3200 let mut fake_servers = language_registry.register_fake_lsp(
3201 "Rust",
3202 FakeLspAdapter {
3203 disk_based_diagnostics_sources: vec!["disk".into()],
3204 ..Default::default()
3205 },
3206 );
3207
3208 let buffer = project
3209 .update(cx, |project, cx| {
3210 project.open_local_buffer(path!("/dir/a.rs"), cx)
3211 })
3212 .await
3213 .unwrap();
3214
3215 let _handle = project.update(cx, |project, cx| {
3216 project.register_buffer_with_language_servers(&buffer, cx)
3217 });
3218
3219 let mut fake_server = fake_servers.next().await.unwrap();
3220 let open_notification = fake_server
3221 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3222 .await;
3223
3224 // Edit the buffer, moving the content down
3225 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3226 let change_notification_1 = fake_server
3227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3228 .await;
3229 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3230
3231 // Report some diagnostics for the initial version of the buffer
3232 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3233 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3234 version: Some(open_notification.text_document.version),
3235 diagnostics: vec![
3236 lsp::Diagnostic {
3237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3238 severity: Some(DiagnosticSeverity::ERROR),
3239 message: "undefined variable 'A'".to_string(),
3240 source: Some("disk".to_string()),
3241 ..Default::default()
3242 },
3243 lsp::Diagnostic {
3244 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3245 severity: Some(DiagnosticSeverity::ERROR),
3246 message: "undefined variable 'BB'".to_string(),
3247 source: Some("disk".to_string()),
3248 ..Default::default()
3249 },
3250 lsp::Diagnostic {
3251 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3252 severity: Some(DiagnosticSeverity::ERROR),
3253 source: Some("disk".to_string()),
3254 message: "undefined variable 'CCC'".to_string(),
3255 ..Default::default()
3256 },
3257 ],
3258 });
3259
3260 // The diagnostics have moved down since they were created.
3261 cx.executor().run_until_parked();
3262 buffer.update(cx, |buffer, _| {
3263 assert_eq!(
3264 buffer
3265 .snapshot()
3266 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3267 .collect::<Vec<_>>(),
3268 &[
3269 DiagnosticEntry {
3270 range: Point::new(3, 9)..Point::new(3, 11),
3271 diagnostic: Diagnostic {
3272 source: Some("disk".into()),
3273 severity: DiagnosticSeverity::ERROR,
3274 message: "undefined variable 'BB'".to_string(),
3275 is_disk_based: true,
3276 group_id: 1,
3277 is_primary: true,
3278 source_kind: DiagnosticSourceKind::Pushed,
3279 ..Diagnostic::default()
3280 },
3281 },
3282 DiagnosticEntry {
3283 range: Point::new(4, 9)..Point::new(4, 12),
3284 diagnostic: Diagnostic {
3285 source: Some("disk".into()),
3286 severity: DiagnosticSeverity::ERROR,
3287 message: "undefined variable 'CCC'".to_string(),
3288 is_disk_based: true,
3289 group_id: 2,
3290 is_primary: true,
3291 source_kind: DiagnosticSourceKind::Pushed,
3292 ..Diagnostic::default()
3293 }
3294 }
3295 ]
3296 );
3297 assert_eq!(
3298 chunks_with_diagnostics(buffer, 0..buffer.len()),
3299 [
3300 ("\n\nfn a() { ".to_string(), None),
3301 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3302 (" }\nfn b() { ".to_string(), None),
3303 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3304 (" }\nfn c() { ".to_string(), None),
3305 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3306 (" }\n".to_string(), None),
3307 ]
3308 );
3309 assert_eq!(
3310 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3311 [
3312 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3313 (" }\nfn c() { ".to_string(), None),
3314 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3315 ]
3316 );
3317 });
3318
3319 // Ensure overlapping diagnostics are highlighted correctly.
3320 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3321 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3322 version: Some(open_notification.text_document.version),
3323 diagnostics: vec![
3324 lsp::Diagnostic {
3325 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3326 severity: Some(DiagnosticSeverity::ERROR),
3327 message: "undefined variable 'A'".to_string(),
3328 source: Some("disk".to_string()),
3329 ..Default::default()
3330 },
3331 lsp::Diagnostic {
3332 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3333 severity: Some(DiagnosticSeverity::WARNING),
3334 message: "unreachable statement".to_string(),
3335 source: Some("disk".to_string()),
3336 ..Default::default()
3337 },
3338 ],
3339 });
3340
3341 cx.executor().run_until_parked();
3342 buffer.update(cx, |buffer, _| {
3343 assert_eq!(
3344 buffer
3345 .snapshot()
3346 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3347 .collect::<Vec<_>>(),
3348 &[
3349 DiagnosticEntry {
3350 range: Point::new(2, 9)..Point::new(2, 12),
3351 diagnostic: Diagnostic {
3352 source: Some("disk".into()),
3353 severity: DiagnosticSeverity::WARNING,
3354 message: "unreachable statement".to_string(),
3355 is_disk_based: true,
3356 group_id: 4,
3357 is_primary: true,
3358 source_kind: DiagnosticSourceKind::Pushed,
3359 ..Diagnostic::default()
3360 }
3361 },
3362 DiagnosticEntry {
3363 range: Point::new(2, 9)..Point::new(2, 10),
3364 diagnostic: Diagnostic {
3365 source: Some("disk".into()),
3366 severity: DiagnosticSeverity::ERROR,
3367 message: "undefined variable 'A'".to_string(),
3368 is_disk_based: true,
3369 group_id: 3,
3370 is_primary: true,
3371 source_kind: DiagnosticSourceKind::Pushed,
3372 ..Diagnostic::default()
3373 },
3374 }
3375 ]
3376 );
3377 assert_eq!(
3378 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3379 [
3380 ("fn a() { ".to_string(), None),
3381 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3383 ("\n".to_string(), None),
3384 ]
3385 );
3386 assert_eq!(
3387 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3388 [
3389 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3390 ("\n".to_string(), None),
3391 ]
3392 );
3393 });
3394
3395 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3396 // changes since the last save.
3397 buffer.update(cx, |buffer, cx| {
3398 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3399 buffer.edit(
3400 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3401 None,
3402 cx,
3403 );
3404 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3405 });
3406 let change_notification_2 = fake_server
3407 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3408 .await;
3409 assert!(
3410 change_notification_2.text_document.version > change_notification_1.text_document.version
3411 );
3412
3413 // Handle out-of-order diagnostics
3414 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3415 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3416 version: Some(change_notification_2.text_document.version),
3417 diagnostics: vec![
3418 lsp::Diagnostic {
3419 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3420 severity: Some(DiagnosticSeverity::ERROR),
3421 message: "undefined variable 'BB'".to_string(),
3422 source: Some("disk".to_string()),
3423 ..Default::default()
3424 },
3425 lsp::Diagnostic {
3426 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3427 severity: Some(DiagnosticSeverity::WARNING),
3428 message: "undefined variable 'A'".to_string(),
3429 source: Some("disk".to_string()),
3430 ..Default::default()
3431 },
3432 ],
3433 });
3434
3435 cx.executor().run_until_parked();
3436 buffer.update(cx, |buffer, _| {
3437 assert_eq!(
3438 buffer
3439 .snapshot()
3440 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3441 .collect::<Vec<_>>(),
3442 &[
3443 DiagnosticEntry {
3444 range: Point::new(2, 21)..Point::new(2, 22),
3445 diagnostic: Diagnostic {
3446 source: Some("disk".into()),
3447 severity: DiagnosticSeverity::WARNING,
3448 message: "undefined variable 'A'".to_string(),
3449 is_disk_based: true,
3450 group_id: 6,
3451 is_primary: true,
3452 source_kind: DiagnosticSourceKind::Pushed,
3453 ..Diagnostic::default()
3454 }
3455 },
3456 DiagnosticEntry {
3457 range: Point::new(3, 9)..Point::new(3, 14),
3458 diagnostic: Diagnostic {
3459 source: Some("disk".into()),
3460 severity: DiagnosticSeverity::ERROR,
3461 message: "undefined variable 'BB'".to_string(),
3462 is_disk_based: true,
3463 group_id: 5,
3464 is_primary: true,
3465 source_kind: DiagnosticSourceKind::Pushed,
3466 ..Diagnostic::default()
3467 },
3468 }
3469 ]
3470 );
3471 });
3472}
3473
// Regression test: diagnostics with zero-width (empty) ranges must still be
// visible. An empty range is widened to cover an adjacent character when the
// buffer's chunks are rendered.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one in
    // the middle of line 0 (before the ";") and one at the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3551
3552#[gpui::test]
3553async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let fs = FakeFs::new(cx.executor());
3557 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3558 .await;
3559
3560 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3561 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3562
3563 lsp_store.update(cx, |lsp_store, cx| {
3564 lsp_store
3565 .update_diagnostic_entries(
3566 LanguageServerId(0),
3567 Path::new(path!("/dir/a.rs")).to_owned(),
3568 None,
3569 None,
3570 vec![DiagnosticEntry {
3571 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3572 diagnostic: Diagnostic {
3573 severity: DiagnosticSeverity::ERROR,
3574 is_primary: true,
3575 message: "syntax error a1".to_string(),
3576 source_kind: DiagnosticSourceKind::Pushed,
3577 ..Diagnostic::default()
3578 },
3579 }],
3580 cx,
3581 )
3582 .unwrap();
3583 lsp_store
3584 .update_diagnostic_entries(
3585 LanguageServerId(1),
3586 Path::new(path!("/dir/a.rs")).to_owned(),
3587 None,
3588 None,
3589 vec![DiagnosticEntry {
3590 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3591 diagnostic: Diagnostic {
3592 severity: DiagnosticSeverity::ERROR,
3593 is_primary: true,
3594 message: "syntax error b1".to_string(),
3595 source_kind: DiagnosticSourceKind::Pushed,
3596 ..Diagnostic::default()
3597 },
3598 }],
3599 cx,
3600 )
3601 .unwrap();
3602
3603 assert_eq!(
3604 lsp_store.diagnostic_summary(false, cx),
3605 DiagnosticSummary {
3606 error_count: 2,
3607 warning_count: 0,
3608 }
3609 );
3610 });
3611}
3612
// Verifies that `edits_from_lsp` correctly translates edits computed by a
// language server against an *older* document version: the user keeps editing
// after the server's snapshot, and the server's edits (tagged with the stale
// version number) must be remapped through the intervening changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the document version the server saw at open time; the edits
    // below are issued against this (soon-to-be-stale) version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Ask the store to translate server edits expressed in coordinates of the
    // original (pre-edit) document version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must yield the server's intended result
    // with the interleaved user edits preserved.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3767
// Verifies that a whole-file rewrite sent as several overlapping-ish LSP
// edits (as rust-analyzer does for the merge-imports code action) is
// collapsed into the minimal set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big diff must be reduced to two minimal edits: the import merge
        // and the removal of the now-duplicated second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3878
3879#[gpui::test]
3880async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3881 cx: &mut gpui::TestAppContext,
3882) {
3883 init_test(cx);
3884
3885 let text = "Path()";
3886
3887 let fs = FakeFs::new(cx.executor());
3888 fs.insert_tree(
3889 path!("/dir"),
3890 json!({
3891 "a.rs": text
3892 }),
3893 )
3894 .await;
3895
3896 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3897 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3898 let buffer = project
3899 .update(cx, |project, cx| {
3900 project.open_local_buffer(path!("/dir/a.rs"), cx)
3901 })
3902 .await
3903 .unwrap();
3904
3905 // Simulate the language server sending us a pair of edits at the same location,
3906 // with an insertion following a replacement (which violates the LSP spec).
3907 let edits = lsp_store
3908 .update(cx, |lsp_store, cx| {
3909 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3910 &buffer,
3911 [
3912 lsp::TextEdit {
3913 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3914 new_text: "Path".into(),
3915 },
3916 lsp::TextEdit {
3917 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3918 new_text: "from path import Path\n\n\n".into(),
3919 },
3920 ],
3921 LanguageServerId(0),
3922 None,
3923 cx,
3924 )
3925 })
3926 .await
3927 .unwrap();
3928
3929 buffer.update(cx, |buffer, cx| {
3930 buffer.edit(edits, None, cx);
3931 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3932 });
3933}
3934
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, inverted ranges (end before start), and ranges past the end of the
// document, all of which must be normalized into valid minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits a well-formed diff would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4041
4042fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4043 buffer: &Buffer,
4044 range: Range<T>,
4045) -> Vec<(String, Option<DiagnosticSeverity>)> {
4046 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4047 for chunk in buffer.snapshot().chunks(range, true) {
4048 if chunks
4049 .last()
4050 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4051 {
4052 chunks.last_mut().unwrap().0.push_str(chunk.text);
4053 } else {
4054 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4055 }
4056 }
4057 chunks
4058}
4059
// Verifies go-to-definition into a file outside the visible worktree: the
// target file is opened in a temporary invisible worktree that is dropped
// together with the definition result.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held in an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path alongside its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4160
4161#[gpui::test]
4162async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4163 init_test(cx);
4164
4165 let fs = FakeFs::new(cx.executor());
4166 fs.insert_tree(
4167 path!("/dir"),
4168 json!({
4169 "a.ts": "",
4170 }),
4171 )
4172 .await;
4173
4174 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4175
4176 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4177 language_registry.add(typescript_lang());
4178 let mut fake_language_servers = language_registry.register_fake_lsp(
4179 "TypeScript",
4180 FakeLspAdapter {
4181 capabilities: lsp::ServerCapabilities {
4182 completion_provider: Some(lsp::CompletionOptions {
4183 trigger_characters: Some(vec![".".to_string()]),
4184 ..Default::default()
4185 }),
4186 ..Default::default()
4187 },
4188 ..Default::default()
4189 },
4190 );
4191
4192 let (buffer, _handle) = project
4193 .update(cx, |p, cx| {
4194 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4195 })
4196 .await
4197 .unwrap();
4198
4199 let fake_server = fake_language_servers.next().await.unwrap();
4200 cx.executor().run_until_parked();
4201
4202 // When text_edit exists, it takes precedence over insert_text and label
4203 let text = "let a = obj.fqn";
4204 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4205 let completions = project.update(cx, |project, cx| {
4206 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4207 });
4208
4209 fake_server
4210 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4211 Ok(Some(lsp::CompletionResponse::Array(vec![
4212 lsp::CompletionItem {
4213 label: "labelText".into(),
4214 insert_text: Some("insertText".into()),
4215 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4216 range: lsp::Range::new(
4217 lsp::Position::new(0, text.len() as u32 - 3),
4218 lsp::Position::new(0, text.len() as u32),
4219 ),
4220 new_text: "textEditText".into(),
4221 })),
4222 ..Default::default()
4223 },
4224 ])))
4225 })
4226 .next()
4227 .await;
4228
4229 let completions = completions
4230 .await
4231 .unwrap()
4232 .into_iter()
4233 .flat_map(|response| response.completions)
4234 .collect::<Vec<_>>();
4235 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4236
4237 assert_eq!(completions.len(), 1);
4238 assert_eq!(completions[0].new_text, "textEditText");
4239 assert_eq!(
4240 completions[0].replace_range.to_offset(&snapshot),
4241 text.len() - 3..text.len()
4242 );
4243}
4244
// Exercises completion lists that use `itemDefaults.editRange` (LSP 3.17):
// items without their own `text_edit` fall back to the list-level edit range,
// taking their replacement text from `text_edit_text` or, failing that, `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // The default edit_range plus text_edit_text should be used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4382
// Exercises completion items without any explicit edit range: the replacement
// range must be inferred from the word around the cursor, and the new text
// comes from `insert_text` or, failing that, `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label; the range covers the "fqn" word.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is the fallback text; the range covers the word "cmp".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4489
// Verifies that carriage returns ("\r" and "\r\n") in a server-provided
// `insert_text` are normalized to plain "\n" in the resulting completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server with completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Initiate the request first; the handler below answers it once.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mixes a bare "\r" and a "\r\n" sequence.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" must come out as "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4558
// Covers the command-based code-action flow: the code action resolves to a
// command rather than edits, the command is executed on the server, and the
// actual edits come back via a `workspace/applyEdit` request from the server.
// Those edits must be captured in the project transaction returned by
// `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // The fake server advertises code-action resolution and a single
    // executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"), which carries `data`.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable, restoring the original contents.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4702
// Renaming a file into a directory hierarchy that does not exist yet must
// create the intermediate directories; renaming it again into an
// already-existing directory must also succeed. File contents are preserved
// across both moves.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Grab the worktree and the entry id of the file we are about to move.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First move: target directory chain "dir1/dir2/dir3" doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id at its new location for the second move.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second move: target directory "dir1/dir2" already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4810
4811#[gpui::test(iterations = 10)]
4812async fn test_save_file(cx: &mut gpui::TestAppContext) {
4813 init_test(cx);
4814
4815 let fs = FakeFs::new(cx.executor());
4816 fs.insert_tree(
4817 path!("/dir"),
4818 json!({
4819 "file1": "the old contents",
4820 }),
4821 )
4822 .await;
4823
4824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4825 let buffer = project
4826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4827 .await
4828 .unwrap();
4829 buffer.update(cx, |buffer, cx| {
4830 assert_eq!(buffer.text(), "the old contents");
4831 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4832 });
4833
4834 project
4835 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4836 .await
4837 .unwrap();
4838
4839 let new_text = fs
4840 .load(Path::new(path!("/dir/file1")))
4841 .await
4842 .unwrap()
4843 .replace("\r\n", "\n");
4844 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4845}
4846
// A buffer created without a file has no language servers. Saving it as a
// path with a recognized extension (".rs") must spawn the matching language
// server and notify it that the file is open.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file there is no language to match,
    // so no server should be running for it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer now reports the freshly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4926
4927#[gpui::test(iterations = 30)]
4928async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4929 init_test(cx);
4930
4931 let fs = FakeFs::new(cx.executor());
4932 fs.insert_tree(
4933 path!("/dir"),
4934 json!({
4935 "file1": "the original contents",
4936 }),
4937 )
4938 .await;
4939
4940 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4941 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4942 let buffer = project
4943 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4944 .await
4945 .unwrap();
4946
4947 // Change the buffer's file on disk, and then wait for the file change
4948 // to be detected by the worktree, so that the buffer starts reloading.
4949 fs.save(
4950 path!("/dir/file1").as_ref(),
4951 &"the first contents".into(),
4952 Default::default(),
4953 )
4954 .await
4955 .unwrap();
4956 worktree.next_event(cx).await;
4957
4958 // Change the buffer's file again. Depending on the random seed, the
4959 // previous file change may still be in progress.
4960 fs.save(
4961 path!("/dir/file1").as_ref(),
4962 &"the second contents".into(),
4963 Default::default(),
4964 )
4965 .await
4966 .unwrap();
4967 worktree.next_event(cx).await;
4968
4969 cx.executor().run_until_parked();
4970 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4971 buffer.read_with(cx, |buffer, _| {
4972 assert_eq!(buffer.text(), on_disk_text);
4973 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4974 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4975 });
4976}
4977
4978#[gpui::test(iterations = 30)]
4979async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4980 init_test(cx);
4981
4982 let fs = FakeFs::new(cx.executor());
4983 fs.insert_tree(
4984 path!("/dir"),
4985 json!({
4986 "file1": "the original contents",
4987 }),
4988 )
4989 .await;
4990
4991 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4992 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4993 let buffer = project
4994 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4995 .await
4996 .unwrap();
4997
4998 // Change the buffer's file on disk, and then wait for the file change
4999 // to be detected by the worktree, so that the buffer starts reloading.
5000 fs.save(
5001 path!("/dir/file1").as_ref(),
5002 &"the first contents".into(),
5003 Default::default(),
5004 )
5005 .await
5006 .unwrap();
5007 worktree.next_event(cx).await;
5008
5009 cx.executor()
5010 .spawn(cx.executor().simulate_random_delay())
5011 .await;
5012
5013 // Perform a noop edit, causing the buffer's version to increase.
5014 buffer.update(cx, |buffer, cx| {
5015 buffer.edit([(0..0, " ")], None, cx);
5016 buffer.undo(cx);
5017 });
5018
5019 cx.executor().run_until_parked();
5020 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5021 buffer.read_with(cx, |buffer, _| {
5022 let buffer_text = buffer.text();
5023 if buffer_text == on_disk_text {
5024 assert!(
5025 !buffer.is_dirty() && !buffer.has_conflict(),
5026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5027 );
5028 }
5029 // If the file change occurred while the buffer was processing the first
5030 // change, the buffer will be in a conflicting state.
5031 else {
5032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5034 }
5035 });
5036}
5037
5038#[gpui::test]
5039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5040 init_test(cx);
5041
5042 let fs = FakeFs::new(cx.executor());
5043 fs.insert_tree(
5044 path!("/dir"),
5045 json!({
5046 "file1": "the old contents",
5047 }),
5048 )
5049 .await;
5050
5051 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5052 let buffer = project
5053 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5054 .await
5055 .unwrap();
5056 buffer.update(cx, |buffer, cx| {
5057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5058 });
5059
5060 project
5061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5062 .await
5063 .unwrap();
5064
5065 let new_text = fs
5066 .load(Path::new(path!("/dir/file1")))
5067 .await
5068 .unwrap()
5069 .replace("\r\n", "\n");
5070 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5071}
5072
// Saving an untitled buffer to a path must associate the buffer with the new
// file, clear its dirty state, re-detect its language from the file name,
// and make subsequent opens of that path return the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // An untitled buffer starts out as Plain Text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension causes the language to be re-detected as Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the just-saved path yields the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5126
// "Save as" on a buffer that already has a file: the buffer becomes
// associated with the new path, while the original file on disk keeps its
// old, unedited contents.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Edit the last character ("a" -> "b") without saving in place.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5193
// Exercises worktree rescanning after files are renamed/deleted on the real
// filesystem, and verifies that a remote (replicated) worktree converges to
// the same state once the buffered update messages are applied to it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS test: allow the executor to block on actual filesystem I/O.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Buffer all update messages the local worktree produces; they are
    // replayed onto the remote worktree at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // After the rescan, the local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5361
// Linux-specific regression test: after a directory is deleted and then
// recreated at the same path, filesystem events for children subsequently
// created inside it (the marker file) must still reach the worktree.
#[cfg(target_os = "linux")]
#[gpui::test(retries = 5)]
async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow blocking on actual filesystem I/O.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({}));
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    tree.flush_fs_events(cx).await;

    // Create the directory for the first time.
    let repro_dir = dir.path().join("repro");
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // Delete it...
    std::fs::remove_dir_all(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
    });

    // ...and recreate it at the same path.
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // A file created inside the recreated directory must show up too.
    std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(
            tree.read(cx)
                .entry_for_path(rel_path("repro/repro-marker"))
                .is_some()
        );
    });
}
5407
// Renaming a directory must preserve the project entry ids of both the
// directory and its children, and must not mark buffers opened on files
// inside it as dirty.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture the entry ids before the rename for later comparison.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and its child keep their original entry ids, and
    // the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5461
// Opening the same path multiple times — even concurrently — must yield a
// single shared buffer entity per path.
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.entity_id();
    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);

    // Open the same path again while it is still open.
    // (buffer_a_2 still holds the buffer alive after this drop.)
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
}
5507
// Verifies buffer dirty-state tracking and the exact sequence of buffer
// events emitted across edits, saves, file deletions, and reverts.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Records every non-`Operation` event the buffer emits, so the
    // assertions below can check exact event sequences.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // `Operation` events accompany every edit; skip them to keep
                // the recorded sequence focused on state changes.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by marking the current version as persisted at the
        // file's current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first edit after a save produces `DirtyChanged`; the
    // second edit is `Edited` alone because the buffer was already dirty.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5689
// Verifies how a buffer reacts to on-disk changes: a clean buffer reloads
// (with anchors remapped through the diff), while a dirty buffer keeps its
// contents and is marked as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers denote offsets whose anchors are tracked across the
    // on-disk reload below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors must land on the marked positions in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5772
5773#[gpui::test]
5774async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5775 init_test(cx);
5776
5777 let fs = FakeFs::new(cx.executor());
5778 fs.insert_tree(
5779 path!("/dir"),
5780 json!({
5781 "file1": "a\nb\nc\n",
5782 "file2": "one\r\ntwo\r\nthree\r\n",
5783 }),
5784 )
5785 .await;
5786
5787 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5788 let buffer1 = project
5789 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5790 .await
5791 .unwrap();
5792 let buffer2 = project
5793 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5794 .await
5795 .unwrap();
5796
5797 buffer1.update(cx, |buffer, _| {
5798 assert_eq!(buffer.text(), "a\nb\nc\n");
5799 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5800 });
5801 buffer2.update(cx, |buffer, _| {
5802 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5803 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5804 });
5805
5806 // Change a file's line endings on disk from unix to windows. The buffer's
5807 // state updates correctly.
5808 fs.save(
5809 path!("/dir/file1").as_ref(),
5810 &"aaa\nb\nc\n".into(),
5811 LineEnding::Windows,
5812 )
5813 .await
5814 .unwrap();
5815 cx.executor().run_until_parked();
5816 buffer1.update(cx, |buffer, _| {
5817 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5818 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5819 });
5820
5821 // Save a file with windows line endings. The file is written correctly.
5822 buffer2.update(cx, |buffer, cx| {
5823 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5824 });
5825 project
5826 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5827 .await
5828 .unwrap();
5829 assert_eq!(
5830 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5831 "one\r\ntwo\r\nthree\r\nfour\r\n",
5832 );
5833}
5834
// Verifies that diagnostics connected via `related_information` are grouped:
// primary diagnostics and their supplemental hints share a `group_id`, and
// `diagnostic_group` returns all members of a group in position order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic "families": "error 1" with one hint, and "error 2" with
    // two hints. The hints are published as separate diagnostics that point
    // back at their primary via `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. The "error 2" family has
    // group_id 0 and "error 1" has group_id 1; only the main diagnostic of
    // each group is `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" family (two hints plus the primary error).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" family (primary warning plus one hint).
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6094
// Verifies that renaming a project entry sends `workspace/willRenameFiles`
// to a language server that registered for file operations, applies the
// workspace edit the server returns, and then sends `workspace/didRenameFiles`.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in renames of `.rs` files and of any folder, so the
    // rename below should trigger will/did-rename notifications.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the returned task resolves once the rename (and
    // the willRename round-trip) completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the project is
    // expected to resolve (apply) exactly this edit.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6231
// Verifies the symbol-rename flow: `prepare_rename` resolves the renameable
// range via the language server, and `perform_rename` applies a multi-file
// `WorkspaceEdit` to the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepare support so `prepare_rename` issues a
                    // textDocument/prepareRename request.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the range of the symbol.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers; `remove_entry`
    // pulls out `one.rs`, leaving `two.rs` as the sole remaining key.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6372
6373#[gpui::test]
6374async fn test_search(cx: &mut gpui::TestAppContext) {
6375 init_test(cx);
6376
6377 let fs = FakeFs::new(cx.executor());
6378 fs.insert_tree(
6379 path!("/dir"),
6380 json!({
6381 "one.rs": "const ONE: usize = 1;",
6382 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6383 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6384 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6385 }),
6386 )
6387 .await;
6388 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6389 assert_eq!(
6390 search(
6391 &project,
6392 SearchQuery::text(
6393 "TWO",
6394 false,
6395 true,
6396 false,
6397 Default::default(),
6398 Default::default(),
6399 false,
6400 None
6401 )
6402 .unwrap(),
6403 cx
6404 )
6405 .await
6406 .unwrap(),
6407 HashMap::from_iter([
6408 (path!("dir/two.rs").to_string(), vec![6..9]),
6409 (path!("dir/three.rs").to_string(), vec![37..40])
6410 ])
6411 );
6412
6413 let buffer_4 = project
6414 .update(cx, |project, cx| {
6415 project.open_local_buffer(path!("/dir/four.rs"), cx)
6416 })
6417 .await
6418 .unwrap();
6419 buffer_4.update(cx, |buffer, cx| {
6420 let text = "two::TWO";
6421 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6422 });
6423
6424 assert_eq!(
6425 search(
6426 &project,
6427 SearchQuery::text(
6428 "TWO",
6429 false,
6430 true,
6431 false,
6432 Default::default(),
6433 Default::default(),
6434 false,
6435 None,
6436 )
6437 .unwrap(),
6438 cx
6439 )
6440 .await
6441 .unwrap(),
6442 HashMap::from_iter([
6443 (path!("dir/two.rs").to_string(), vec![6..9]),
6444 (path!("dir/three.rs").to_string(), vec![37..40]),
6445 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6446 ])
6447 );
6448}
6449
// Verifies that inclusion path matchers restrict search results to the
// matching files, and that non-matching inclusion globs are harmless when
// combined with matching ones.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion restricted to Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob combined with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs together.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6573
// Verifies that exclusion path matchers remove matching files from search
// results, and that non-matching exclusion globs have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Exclude Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion combined with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file in the tree.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6697
6698#[gpui::test]
6699async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6700 init_test(cx);
6701
6702 let search_query = "file";
6703
6704 let fs = FakeFs::new(cx.executor());
6705 fs.insert_tree(
6706 path!("/dir"),
6707 json!({
6708 "one.rs": r#"// Rust file one"#,
6709 "one.ts": r#"// TypeScript file one"#,
6710 "two.rs": r#"// Rust file two"#,
6711 "two.ts": r#"// TypeScript file two"#,
6712 }),
6713 )
6714 .await;
6715
6716 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6717 let path_style = PathStyle::local();
6718 let _buffer = project.update(cx, |project, cx| {
6719 project.create_local_buffer("file", None, false, cx)
6720 });
6721
6722 assert_eq!(
6723 search(
6724 &project,
6725 SearchQuery::text(
6726 search_query,
6727 false,
6728 true,
6729 false,
6730 Default::default(),
6731 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6732 false,
6733 None,
6734 )
6735 .unwrap(),
6736 cx
6737 )
6738 .await
6739 .unwrap(),
6740 HashMap::from_iter([
6741 (path!("dir/one.rs").to_string(), vec![8..12]),
6742 (path!("dir/one.ts").to_string(), vec![14..18]),
6743 (path!("dir/two.rs").to_string(), vec![8..12]),
6744 (path!("dir/two.ts").to_string(), vec![14..18]),
6745 ]),
6746 "If no exclusions match, all files should be returned"
6747 );
6748
6749 assert_eq!(
6750 search(
6751 &project,
6752 SearchQuery::text(
6753 search_query,
6754 false,
6755 true,
6756 false,
6757 Default::default(),
6758 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6759 false,
6760 None,
6761 )
6762 .unwrap(),
6763 cx
6764 )
6765 .await
6766 .unwrap(),
6767 HashMap::from_iter([
6768 (path!("dir/one.ts").to_string(), vec![14..18]),
6769 (path!("dir/two.ts").to_string(), vec![14..18]),
6770 ]),
6771 "Rust exclusion search should give only TypeScript files"
6772 );
6773
6774 assert_eq!(
6775 search(
6776 &project,
6777 SearchQuery::text(
6778 search_query,
6779 false,
6780 true,
6781 false,
6782 Default::default(),
6783 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6784 false,
6785 None,
6786 )
6787 .unwrap(),
6788 cx
6789 )
6790 .await
6791 .unwrap(),
6792 HashMap::from_iter([
6793 (path!("dir/one.rs").to_string(), vec![8..12]),
6794 (path!("dir/two.rs").to_string(), vec![8..12]),
6795 ]),
6796 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6797 );
6798
6799 assert!(
6800 search(
6801 &project,
6802 SearchQuery::text(
6803 search_query,
6804 false,
6805 true,
6806 false,
6807 Default::default(),
6808 PathMatcher::new(
6809 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6810 PathStyle::local(),
6811 )
6812 .unwrap(),
6813 false,
6814 None,
6815 )
6816 .unwrap(),
6817 cx
6818 )
6819 .await
6820 .unwrap()
6821 .is_empty(),
6822 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6823 );
6824}
6825
6826#[gpui::test]
6827async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6828 init_test(cx);
6829
6830 let search_query = "file";
6831
6832 let fs = FakeFs::new(cx.executor());
6833 fs.insert_tree(
6834 path!("/dir"),
6835 json!({
6836 "one.rs": r#"// Rust file one"#,
6837 "one.ts": r#"// TypeScript file one"#,
6838 "two.rs": r#"// Rust file two"#,
6839 "two.ts": r#"// TypeScript file two"#,
6840 }),
6841 )
6842 .await;
6843 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6844 assert!(
6845 search(
6846 &project,
6847 SearchQuery::text(
6848 search_query,
6849 false,
6850 true,
6851 false,
6852 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6853 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6854 false,
6855 None,
6856 )
6857 .unwrap(),
6858 cx
6859 )
6860 .await
6861 .unwrap()
6862 .is_empty(),
6863 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6864 );
6865
6866 assert!(
6867 search(
6868 &project,
6869 SearchQuery::text(
6870 search_query,
6871 false,
6872 true,
6873 false,
6874 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6875 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6876 false,
6877 None,
6878 )
6879 .unwrap(),
6880 cx
6881 )
6882 .await
6883 .unwrap()
6884 .is_empty(),
6885 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6886 );
6887
6888 assert!(
6889 search(
6890 &project,
6891 SearchQuery::text(
6892 search_query,
6893 false,
6894 true,
6895 false,
6896 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6897 .unwrap(),
6898 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6899 .unwrap(),
6900 false,
6901 None,
6902 )
6903 .unwrap(),
6904 cx
6905 )
6906 .await
6907 .unwrap()
6908 .is_empty(),
6909 "Non-matching inclusions and exclusions should not change that."
6910 );
6911
6912 assert_eq!(
6913 search(
6914 &project,
6915 SearchQuery::text(
6916 search_query,
6917 false,
6918 true,
6919 false,
6920 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6921 .unwrap(),
6922 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6923 .unwrap(),
6924 false,
6925 None,
6926 )
6927 .unwrap(),
6928 cx
6929 )
6930 .await
6931 .unwrap(),
6932 HashMap::from_iter([
6933 (path!("dir/one.ts").to_string(), vec![14..18]),
6934 (path!("dir/two.ts").to_string(), vec![14..18]),
6935 ]),
6936 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6937 );
6938}
6939
6940#[gpui::test]
6941async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6942 init_test(cx);
6943
6944 let fs = FakeFs::new(cx.executor());
6945 fs.insert_tree(
6946 path!("/worktree-a"),
6947 json!({
6948 "haystack.rs": r#"// NEEDLE"#,
6949 "haystack.ts": r#"// NEEDLE"#,
6950 }),
6951 )
6952 .await;
6953 fs.insert_tree(
6954 path!("/worktree-b"),
6955 json!({
6956 "haystack.rs": r#"// NEEDLE"#,
6957 "haystack.ts": r#"// NEEDLE"#,
6958 }),
6959 )
6960 .await;
6961
6962 let path_style = PathStyle::local();
6963 let project = Project::test(
6964 fs.clone(),
6965 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6966 cx,
6967 )
6968 .await;
6969
6970 assert_eq!(
6971 search(
6972 &project,
6973 SearchQuery::text(
6974 "NEEDLE",
6975 false,
6976 true,
6977 false,
6978 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6979 Default::default(),
6980 true,
6981 None,
6982 )
6983 .unwrap(),
6984 cx
6985 )
6986 .await
6987 .unwrap(),
6988 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6989 "should only return results from included worktree"
6990 );
6991 assert_eq!(
6992 search(
6993 &project,
6994 SearchQuery::text(
6995 "NEEDLE",
6996 false,
6997 true,
6998 false,
6999 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7000 Default::default(),
7001 true,
7002 None,
7003 )
7004 .unwrap(),
7005 cx
7006 )
7007 .await
7008 .unwrap(),
7009 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7010 "should only return results from included worktree"
7011 );
7012
7013 assert_eq!(
7014 search(
7015 &project,
7016 SearchQuery::text(
7017 "NEEDLE",
7018 false,
7019 true,
7020 false,
7021 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7022 Default::default(),
7023 false,
7024 None,
7025 )
7026 .unwrap(),
7027 cx
7028 )
7029 .await
7030 .unwrap(),
7031 HashMap::from_iter([
7032 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7033 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7034 ]),
7035 "should return results from both worktrees"
7036 );
7037}
7038
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored directories ("target", "node_modules") are skipped by
    // default, searched when the include-ignored flag is set, and still
    // subject to inclusion/exclusion path filters in that mode.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search (include-ignored flag off): ignored dirs are skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each query — presumably so each search
    // starts from a clean worktree scan; TODO(review): confirm this is required.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Include-ignored flag on: matches inside "target" and "node_modules"
    // are found as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored flag on, narrowed by an inclusion glob (only the
    // prettier subtree) and an exclusion glob (no *.ts files).
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7163
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching non-ASCII (Cyrillic) text. The expected ranges are byte
    // offsets: each Cyrillic letter is 2 bytes in UTF-8, so the 6-letter
    // word "привет" spans 12 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive query is expected to stay a plain-text query
    // (asserted below) and match only the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive query over this text is expected to be built as a
    // regex query instead (asserted below) and match both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A query containing "." must still be treated literally (only the file
    // with an actual trailing period matches; 13 bytes = 12 + 1 for ".").
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7246
7247#[gpui::test]
7248async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7249 init_test(cx);
7250
7251 let fs = FakeFs::new(cx.executor());
7252 fs.insert_tree(
7253 "/one/two",
7254 json!({
7255 "three": {
7256 "a.txt": "",
7257 "four": {}
7258 },
7259 "c.rs": ""
7260 }),
7261 )
7262 .await;
7263
7264 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7265 project
7266 .update(cx, |project, cx| {
7267 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7268 project.create_entry((id, rel_path("b..")), true, cx)
7269 })
7270 .await
7271 .unwrap()
7272 .into_included()
7273 .unwrap();
7274
7275 assert_eq!(
7276 fs.paths(true),
7277 vec![
7278 PathBuf::from(path!("/")),
7279 PathBuf::from(path!("/one")),
7280 PathBuf::from(path!("/one/two")),
7281 PathBuf::from(path!("/one/two/c.rs")),
7282 PathBuf::from(path!("/one/two/three")),
7283 PathBuf::from(path!("/one/two/three/a.txt")),
7284 PathBuf::from(path!("/one/two/three/b..")),
7285 PathBuf::from(path!("/one/two/three/four")),
7286 ]
7287 );
7288}
7289
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // A hover request is fanned out to every language server that declares
    // hover capabilities; `None` responses are dropped, servers without the
    // capability are never queried, and the remaining responses are merged.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that answer hovers, one
    // that answers `None`, and one with no hover capabilities at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // Deliberately no hover capability: this server must never
                    // receive a hover request (enforced by the panicking
                    // handler installed below).
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start and install a hover handler appropriate
    // to its role in the test.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two produce a real hover; both texts must appear in the
            // merged result.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Answers `None`: queried, but contributes nothing to the result.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be asked: its capabilities exclude hover.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then confirm every capable server actually received a
    // request before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7444
7445#[gpui::test]
7446async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7447 init_test(cx);
7448
7449 let fs = FakeFs::new(cx.executor());
7450 fs.insert_tree(
7451 path!("/dir"),
7452 json!({
7453 "a.ts": "a",
7454 }),
7455 )
7456 .await;
7457
7458 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7459
7460 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7461 language_registry.add(typescript_lang());
7462 let mut fake_language_servers = language_registry.register_fake_lsp(
7463 "TypeScript",
7464 FakeLspAdapter {
7465 capabilities: lsp::ServerCapabilities {
7466 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7467 ..lsp::ServerCapabilities::default()
7468 },
7469 ..FakeLspAdapter::default()
7470 },
7471 );
7472
7473 let (buffer, _handle) = project
7474 .update(cx, |p, cx| {
7475 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7476 })
7477 .await
7478 .unwrap();
7479 cx.executor().run_until_parked();
7480
7481 let fake_server = fake_language_servers
7482 .next()
7483 .await
7484 .expect("failed to get the language server");
7485
7486 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7487 move |_, _| async move {
7488 Ok(Some(lsp::Hover {
7489 contents: lsp::HoverContents::Array(vec![
7490 lsp::MarkedString::String("".to_string()),
7491 lsp::MarkedString::String(" ".to_string()),
7492 lsp::MarkedString::String("\n\n\n".to_string()),
7493 ]),
7494 range: None,
7495 }))
7496 },
7497 );
7498
7499 let hover_task = project.update(cx, |project, cx| {
7500 project.hover(&buffer, Point::new(0, 0), cx)
7501 });
7502 let () = request_handled
7503 .next()
7504 .await
7505 .expect("All hover requests should have been triggered");
7506 assert_eq!(
7507 Vec::<String>::new(),
7508 hover_task
7509 .await
7510 .into_iter()
7511 .flatten()
7512 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7513 .sorted()
7514 .collect::<Vec<_>>(),
7515 "Empty hover parts should be ignored"
7516 );
7517}
7518
7519#[gpui::test]
7520async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7521 init_test(cx);
7522
7523 let fs = FakeFs::new(cx.executor());
7524 fs.insert_tree(
7525 path!("/dir"),
7526 json!({
7527 "a.ts": "a",
7528 }),
7529 )
7530 .await;
7531
7532 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7533
7534 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7535 language_registry.add(typescript_lang());
7536 let mut fake_language_servers = language_registry.register_fake_lsp(
7537 "TypeScript",
7538 FakeLspAdapter {
7539 capabilities: lsp::ServerCapabilities {
7540 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7541 ..lsp::ServerCapabilities::default()
7542 },
7543 ..FakeLspAdapter::default()
7544 },
7545 );
7546
7547 let (buffer, _handle) = project
7548 .update(cx, |p, cx| {
7549 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7550 })
7551 .await
7552 .unwrap();
7553 cx.executor().run_until_parked();
7554
7555 let fake_server = fake_language_servers
7556 .next()
7557 .await
7558 .expect("failed to get the language server");
7559
7560 let mut request_handled = fake_server
7561 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7562 Ok(Some(vec![
7563 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7564 title: "organize imports".to_string(),
7565 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7566 ..lsp::CodeAction::default()
7567 }),
7568 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7569 title: "fix code".to_string(),
7570 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7571 ..lsp::CodeAction::default()
7572 }),
7573 ]))
7574 });
7575
7576 let code_actions_task = project.update(cx, |project, cx| {
7577 project.code_actions(
7578 &buffer,
7579 0..buffer.read(cx).len(),
7580 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7581 cx,
7582 )
7583 });
7584
7585 let () = request_handled
7586 .next()
7587 .await
7588 .expect("The code action request should have been triggered");
7589
7590 let code_actions = code_actions_task.await.unwrap().unwrap();
7591 assert_eq!(code_actions.len(), 1);
7592 assert_eq!(
7593 code_actions[0].lsp_action.action_kind(),
7594 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7595 );
7596}
7597
7598#[gpui::test]
7599async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7600 init_test(cx);
7601
7602 let fs = FakeFs::new(cx.executor());
7603 fs.insert_tree(
7604 path!("/dir"),
7605 json!({
7606 "a.tsx": "a",
7607 }),
7608 )
7609 .await;
7610
7611 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7612
7613 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7614 language_registry.add(tsx_lang());
7615 let language_server_names = [
7616 "TypeScriptServer",
7617 "TailwindServer",
7618 "ESLintServer",
7619 "NoActionsCapabilitiesServer",
7620 ];
7621
7622 let mut language_server_rxs = [
7623 language_registry.register_fake_lsp(
7624 "tsx",
7625 FakeLspAdapter {
7626 name: language_server_names[0],
7627 capabilities: lsp::ServerCapabilities {
7628 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7629 ..lsp::ServerCapabilities::default()
7630 },
7631 ..FakeLspAdapter::default()
7632 },
7633 ),
7634 language_registry.register_fake_lsp(
7635 "tsx",
7636 FakeLspAdapter {
7637 name: language_server_names[1],
7638 capabilities: lsp::ServerCapabilities {
7639 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7640 ..lsp::ServerCapabilities::default()
7641 },
7642 ..FakeLspAdapter::default()
7643 },
7644 ),
7645 language_registry.register_fake_lsp(
7646 "tsx",
7647 FakeLspAdapter {
7648 name: language_server_names[2],
7649 capabilities: lsp::ServerCapabilities {
7650 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7651 ..lsp::ServerCapabilities::default()
7652 },
7653 ..FakeLspAdapter::default()
7654 },
7655 ),
7656 language_registry.register_fake_lsp(
7657 "tsx",
7658 FakeLspAdapter {
7659 name: language_server_names[3],
7660 capabilities: lsp::ServerCapabilities {
7661 code_action_provider: None,
7662 ..lsp::ServerCapabilities::default()
7663 },
7664 ..FakeLspAdapter::default()
7665 },
7666 ),
7667 ];
7668
7669 let (buffer, _handle) = project
7670 .update(cx, |p, cx| {
7671 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7672 })
7673 .await
7674 .unwrap();
7675 cx.executor().run_until_parked();
7676
7677 let mut servers_with_actions_requests = HashMap::default();
7678 for i in 0..language_server_names.len() {
7679 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7680 panic!(
7681 "Failed to get language server #{i} with name {}",
7682 &language_server_names[i]
7683 )
7684 });
7685 let new_server_name = new_server.server.name();
7686
7687 assert!(
7688 !servers_with_actions_requests.contains_key(&new_server_name),
7689 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7690 );
7691 match new_server_name.0.as_ref() {
7692 "TailwindServer" | "TypeScriptServer" => {
7693 servers_with_actions_requests.insert(
7694 new_server_name.clone(),
7695 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7696 move |_, _| {
7697 let name = new_server_name.clone();
7698 async move {
7699 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7700 lsp::CodeAction {
7701 title: format!("{name} code action"),
7702 ..lsp::CodeAction::default()
7703 },
7704 )]))
7705 }
7706 },
7707 ),
7708 );
7709 }
7710 "ESLintServer" => {
7711 servers_with_actions_requests.insert(
7712 new_server_name,
7713 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7714 |_, _| async move { Ok(None) },
7715 ),
7716 );
7717 }
7718 "NoActionsCapabilitiesServer" => {
7719 let _never_handled = new_server
7720 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7721 panic!(
7722 "Should not call for code actions server with no corresponding capabilities"
7723 )
7724 });
7725 }
7726 unexpected => panic!("Unexpected server name: {unexpected}"),
7727 }
7728 }
7729
7730 let code_actions_task = project.update(cx, |project, cx| {
7731 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7732 });
7733
7734 // cx.run_until_parked();
7735 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7736 |mut code_actions_request| async move {
7737 code_actions_request
7738 .next()
7739 .await
7740 .expect("All code actions requests should have been triggered")
7741 },
7742 ))
7743 .await;
7744 assert_eq!(
7745 vec!["TailwindServer code action", "TypeScriptServer code action"],
7746 code_actions_task
7747 .await
7748 .unwrap()
7749 .unwrap()
7750 .into_iter()
7751 .map(|code_action| code_action.lsp_action.title().to_owned())
7752 .sorted()
7753 .collect::<Vec<_>>(),
7754 "Should receive code actions responses from all related servers with hover capabilities"
7755 );
7756}
7757
7758#[gpui::test]
7759async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7760 init_test(cx);
7761
7762 let fs = FakeFs::new(cx.executor());
7763 fs.insert_tree(
7764 "/dir",
7765 json!({
7766 "a.rs": "let a = 1;",
7767 "b.rs": "let b = 2;",
7768 "c.rs": "let c = 2;",
7769 }),
7770 )
7771 .await;
7772
7773 let project = Project::test(
7774 fs,
7775 [
7776 "/dir/a.rs".as_ref(),
7777 "/dir/b.rs".as_ref(),
7778 "/dir/c.rs".as_ref(),
7779 ],
7780 cx,
7781 )
7782 .await;
7783
7784 // check the initial state and get the worktrees
7785 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7786 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7787 assert_eq!(worktrees.len(), 3);
7788
7789 let worktree_a = worktrees[0].read(cx);
7790 let worktree_b = worktrees[1].read(cx);
7791 let worktree_c = worktrees[2].read(cx);
7792
7793 // check they start in the right order
7794 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7795 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7796 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7797
7798 (
7799 worktrees[0].clone(),
7800 worktrees[1].clone(),
7801 worktrees[2].clone(),
7802 )
7803 });
7804
7805 // move first worktree to after the second
7806 // [a, b, c] -> [b, a, c]
7807 project
7808 .update(cx, |project, cx| {
7809 let first = worktree_a.read(cx);
7810 let second = worktree_b.read(cx);
7811 project.move_worktree(first.id(), second.id(), cx)
7812 })
7813 .expect("moving first after second");
7814
7815 // check the state after moving
7816 project.update(cx, |project, cx| {
7817 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7818 assert_eq!(worktrees.len(), 3);
7819
7820 let first = worktrees[0].read(cx);
7821 let second = worktrees[1].read(cx);
7822 let third = worktrees[2].read(cx);
7823
7824 // check they are now in the right order
7825 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7826 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7827 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7828 });
7829
7830 // move the second worktree to before the first
7831 // [b, a, c] -> [a, b, c]
7832 project
7833 .update(cx, |project, cx| {
7834 let second = worktree_a.read(cx);
7835 let first = worktree_b.read(cx);
7836 project.move_worktree(first.id(), second.id(), cx)
7837 })
7838 .expect("moving second before first");
7839
7840 // check the state after moving
7841 project.update(cx, |project, cx| {
7842 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7843 assert_eq!(worktrees.len(), 3);
7844
7845 let first = worktrees[0].read(cx);
7846 let second = worktrees[1].read(cx);
7847 let third = worktrees[2].read(cx);
7848
7849 // check they are now in the right order
7850 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7851 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7852 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7853 });
7854
7855 // move the second worktree to after the third
7856 // [a, b, c] -> [a, c, b]
7857 project
7858 .update(cx, |project, cx| {
7859 let second = worktree_b.read(cx);
7860 let third = worktree_c.read(cx);
7861 project.move_worktree(second.id(), third.id(), cx)
7862 })
7863 .expect("moving second after third");
7864
7865 // check the state after moving
7866 project.update(cx, |project, cx| {
7867 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7868 assert_eq!(worktrees.len(), 3);
7869
7870 let first = worktrees[0].read(cx);
7871 let second = worktrees[1].read(cx);
7872 let third = worktrees[2].read(cx);
7873
7874 // check they are now in the right order
7875 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7876 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7877 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7878 });
7879
7880 // move the third worktree to before the second
7881 // [a, c, b] -> [a, b, c]
7882 project
7883 .update(cx, |project, cx| {
7884 let third = worktree_c.read(cx);
7885 let second = worktree_b.read(cx);
7886 project.move_worktree(third.id(), second.id(), cx)
7887 })
7888 .expect("moving third before second");
7889
7890 // check the state after moving
7891 project.update(cx, |project, cx| {
7892 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7893 assert_eq!(worktrees.len(), 3);
7894
7895 let first = worktrees[0].read(cx);
7896 let second = worktrees[1].read(cx);
7897 let third = worktrees[2].read(cx);
7898
7899 // check they are now in the right order
7900 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7901 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7902 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7903 });
7904
7905 // move the first worktree to after the third
7906 // [a, b, c] -> [b, c, a]
7907 project
7908 .update(cx, |project, cx| {
7909 let first = worktree_a.read(cx);
7910 let third = worktree_c.read(cx);
7911 project.move_worktree(first.id(), third.id(), cx)
7912 })
7913 .expect("moving first after third");
7914
7915 // check the state after moving
7916 project.update(cx, |project, cx| {
7917 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7918 assert_eq!(worktrees.len(), 3);
7919
7920 let first = worktrees[0].read(cx);
7921 let second = worktrees[1].read(cx);
7922 let third = worktrees[2].read(cx);
7923
7924 // check they are now in the right order
7925 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7926 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7927 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7928 });
7929
7930 // move the third worktree to before the first
7931 // [b, c, a] -> [a, b, c]
7932 project
7933 .update(cx, |project, cx| {
7934 let third = worktree_a.read(cx);
7935 let first = worktree_b.read(cx);
7936 project.move_worktree(third.id(), first.id(), cx)
7937 })
7938 .expect("moving third before first");
7939
7940 // check the state after moving
7941 project.update(cx, |project, cx| {
7942 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7943 assert_eq!(worktrees.len(), 3);
7944
7945 let first = worktrees[0].read(cx);
7946 let second = worktrees[1].read(cx);
7947 let third = worktrees[2].read(cx);
7948
7949 // check they are now in the right order
7950 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7951 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7952 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7953 });
7954}
7955
7956#[gpui::test]
7957async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7958 init_test(cx);
7959
7960 let staged_contents = r#"
7961 fn main() {
7962 println!("hello world");
7963 }
7964 "#
7965 .unindent();
7966 let file_contents = r#"
7967 // print goodbye
7968 fn main() {
7969 println!("goodbye world");
7970 }
7971 "#
7972 .unindent();
7973
7974 let fs = FakeFs::new(cx.background_executor.clone());
7975 fs.insert_tree(
7976 "/dir",
7977 json!({
7978 ".git": {},
7979 "src": {
7980 "main.rs": file_contents,
7981 }
7982 }),
7983 )
7984 .await;
7985
7986 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7987
7988 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7989
7990 let buffer = project
7991 .update(cx, |project, cx| {
7992 project.open_local_buffer("/dir/src/main.rs", cx)
7993 })
7994 .await
7995 .unwrap();
7996 let unstaged_diff = project
7997 .update(cx, |project, cx| {
7998 project.open_unstaged_diff(buffer.clone(), cx)
7999 })
8000 .await
8001 .unwrap();
8002
8003 cx.run_until_parked();
8004 unstaged_diff.update(cx, |unstaged_diff, cx| {
8005 let snapshot = buffer.read(cx).snapshot();
8006 assert_hunks(
8007 unstaged_diff.snapshot(cx).hunks(&snapshot),
8008 &snapshot,
8009 &unstaged_diff.base_text_string(cx).unwrap(),
8010 &[
8011 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8012 (
8013 2..3,
8014 " println!(\"hello world\");\n",
8015 " println!(\"goodbye world\");\n",
8016 DiffHunkStatus::modified_none(),
8017 ),
8018 ],
8019 );
8020 });
8021
8022 let staged_contents = r#"
8023 // print goodbye
8024 fn main() {
8025 }
8026 "#
8027 .unindent();
8028
8029 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8030
8031 cx.run_until_parked();
8032 unstaged_diff.update(cx, |unstaged_diff, cx| {
8033 let snapshot = buffer.read(cx).snapshot();
8034 assert_hunks(
8035 unstaged_diff
8036 .snapshot(cx)
8037 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8038 &snapshot,
8039 &unstaged_diff.base_text(cx).text(),
8040 &[(
8041 2..3,
8042 "",
8043 " println!(\"goodbye world\");\n",
8044 DiffHunkStatus::added_none(),
8045 )],
8046 );
8047 });
8048}
8049
// Verifies `open_uncommitted_diff` (working copy vs. HEAD): hunk secondary
// statuses reflect the index, the diff recomputes when HEAD changes, and
// deleted files produce a deletion hunk that loses its secondary (unstaged)
// counterpart once the deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: committed (HEAD), staged (index), and
    // the on-disk working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain `deletion.rs`, which is absent from the
    // working tree: an as-yet-unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the registered Rust language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment exists only in the working copy (still unstaged, so it
    // has a secondary hunk); the println change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The file is still in the index, so the deletion hunk carries a
    // secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the file gone from the index too, the deletion hunk no longer has
    // a secondary counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8233
// End-to-end test of staging hunks via `stage_or_unstage_hunks`: the
// optimistic "pending" secondary status, the event sequence emitted by the
// diff, rollback when the index write fails, and batching of two concurrent
// staging operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; the working copy deletes "zero" and uppercases two lines,
    // producing three hunks (deleted, modified, modified).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the emitted sequence can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is "removal pending" until the index write lands.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The new hunk goes pending even though the index write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is reported as one DiffChanged covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8583
// Regression test: staging a hunk while the FS event for a previous staging
// operation has not yet been delivered must not clobber the pending state.
// The explicit seeds pin executor schedules that previously reproduced the
// failure.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; the working copy deletes "zero" and uppercases two lines.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With events paused, the hunk stays in the pending state.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks must remain pending; the first one must not be
        // reverted by the interleaving.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8777
// Randomized test: repeatedly stage/unstage random hunks with random delays
// in between, then verify that once all IO settles, every hunk's secondary
// status matches the locally-tracked expectation.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    // Keep a handle on the repo so the final index text can be logged.
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    // Randomly toggle hunks, mirroring the expected pending status locally.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so operations interleave with IO.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once quiescent, every pending state should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8897
8898#[gpui::test]
8899async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8900 init_test(cx);
8901
8902 let committed_contents = r#"
8903 fn main() {
8904 println!("hello from HEAD");
8905 }
8906 "#
8907 .unindent();
8908 let file_contents = r#"
8909 fn main() {
8910 println!("hello from the working copy");
8911 }
8912 "#
8913 .unindent();
8914
8915 let fs = FakeFs::new(cx.background_executor.clone());
8916 fs.insert_tree(
8917 "/dir",
8918 json!({
8919 ".git": {},
8920 "src": {
8921 "main.rs": file_contents,
8922 }
8923 }),
8924 )
8925 .await;
8926
8927 fs.set_head_for_repo(
8928 Path::new("/dir/.git"),
8929 &[("src/main.rs", committed_contents.clone())],
8930 "deadbeef",
8931 );
8932 fs.set_index_for_repo(
8933 Path::new("/dir/.git"),
8934 &[("src/main.rs", committed_contents.clone())],
8935 );
8936
8937 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8938
8939 let buffer = project
8940 .update(cx, |project, cx| {
8941 project.open_local_buffer("/dir/src/main.rs", cx)
8942 })
8943 .await
8944 .unwrap();
8945 let uncommitted_diff = project
8946 .update(cx, |project, cx| {
8947 project.open_uncommitted_diff(buffer.clone(), cx)
8948 })
8949 .await
8950 .unwrap();
8951
8952 cx.run_until_parked();
8953 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8954 let snapshot = buffer.read(cx).snapshot();
8955 assert_hunks(
8956 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8957 &snapshot,
8958 &uncommitted_diff.base_text_string(cx).unwrap(),
8959 &[(
8960 1..2,
8961 " println!(\"hello from HEAD\");\n",
8962 " println!(\"hello from the working copy\");\n",
8963 DiffHunkStatus {
8964 kind: DiffHunkStatusKind::Modified,
8965 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8966 },
8967 )],
8968 );
8969 });
8970}
8971
// TODO: Should we test this on Windows also?
// Uses a real git repository and the real filesystem to verify that staging a
// hunk does not downgrade an executable file's mode (100755) in the index.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + spawned `git` processes require parking the executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it on disk so
    // there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk in the file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // `git diff --staged` must not report a mode change for the file.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry itself still records mode 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9056
/// Verifies `GitStore::repository_and_path_for_project_path` against a worktree
/// containing a repository (`dir1`) with a nested sub-repository
/// (`dir1/deps/dep1`): a file outside any repo resolves to `None`, files inside
/// resolve to the innermost enclosing repository, and deleting a repo's `.git`
/// directory makes previously-resolving paths stop resolving.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial git scan so repositories are discovered before querying.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (worktree-relative path, expected (repo work dir, repo-relative path));
        // `None` means the path should not resolve to any repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Remove dir1's `.git`; b.txt should no longer resolve to a repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9146
/// Verifies the special-casing of the home directory as a git repository:
/// when only a subdirectory of home is opened as the project, the repository
/// rooted at home is NOT picked up; when home itself is opened, files resolve
/// to that repository.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only `~/project`: the repo at `~` should not be associated.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open `~` itself: now the repository at home is used.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9204
/// End-to-end status tracking against a real git repository (`RealFs` +
/// `TempTree`): checks the cached statuses observed at startup, after
/// modifying a tracked file, and after committing and deleting files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: spawning git needs parking to be allowed on the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify an unchanged tracked file; it should now appear as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit a/c and drop d from the index, then delete a.txt and b.txt from
    // the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9334
/// Verifies post-processing of raw git statuses: a nested repository (`sub`)
/// is excluded from the outer repo's statuses, and a file deleted in the index
/// but present in HEAD and the working copy gets a combined
/// index-Deleted/worktree-Added status.
///
/// NOTE(review): currently `#[ignore]`d — not run by default.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (work dir ends with "project"), not `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9398
9399#[track_caller]
9400/// We merge lhs into rhs.
9401fn merge_pending_ops_snapshots(
9402 source: Vec<pending_op::PendingOps>,
9403 mut target: Vec<pending_op::PendingOps>,
9404) -> Vec<pending_op::PendingOps> {
9405 for s_ops in source {
9406 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9407 if ops.repo_path == s_ops.repo_path {
9408 Some(idx)
9409 } else {
9410 None
9411 }
9412 }) {
9413 let t_ops = &mut target[idx];
9414 for s_op in s_ops.ops {
9415 if let Some(op_idx) = t_ops
9416 .ops
9417 .iter()
9418 .zip(0..)
9419 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9420 {
9421 let t_op = &mut t_ops.ops[op_idx];
9422 match (s_op.job_status, t_op.job_status) {
9423 (pending_op::JobStatus::Running, _) => {}
9424 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9425 (s_st, t_st) if s_st == t_st => {}
9426 _ => unreachable!(),
9427 }
9428 } else {
9429 t_ops.ops.push(s_op);
9430 }
9431 }
9432 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9433 } else {
9434 target.push(s_ops);
9435 }
9436 }
9437 target
9438}
9439
/// Exercises per-path pending-op bookkeeping for repeated stage/unstage of a
/// single untracked file: each operation first appears as `Running` with a
/// fresh id, settles to `Finished` once the task completes, and the merged
/// event stream records all five ops in order.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next op; incremented after each stage/unstage cycle.
    let mut id = 1u16;

    // Stages or unstages `path`, asserting the op is Running while the task is
    // in flight and Finished after it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history records all five ops, in id order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final op was a stage, so the file ends up Added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9600
/// Verifies coalescing of overlapping stage operations on the same path: the
/// first (detached) stage op is superseded by the second and recorded as
/// `Skipped`, while the second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage op: detached, left in flight.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage op for the same path: awaited (with a timeout guard).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 completed (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (Added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9706
/// Exercises pending-op bookkeeping for `stage_all`/`unstage_all` across two
/// untracked files: each path records a Staged then an Unstaged op, both
/// Finished, and after the final unstage both files read as Untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9835
/// Verifies that opening a subfolder of a repository as the project still
/// discovers the enclosing repo (work dir above the worktree root) and reports
/// statuses for files inside the subfolder, and that clearing the repo's
/// status clears them again.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the deepest subfolder as the project root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses in the fake repo; both paths should report None.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9915
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` (an always-false cfg) until the
// flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflicted cherry-pick state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9998
/// Verifies that rewriting `.gitignore` updates per-entry ignored state and
/// git status: a previously-ignored file becomes trackable (and shows as Added
/// once staged), while a previously-tracked file becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain .gitignore and a.xml; b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // a.xml is now ignored; b.txt is staged (Added) and no longer ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10066
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program already has open.
// This is a limitation of Windows itself.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates
/// `work_directory_abs_path` while preserving the cached per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The work directory path updates; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10148
10149// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
10150// you can't rename a directory which some program has already open. This is a
10151// limitation of the Windows. See:
10152// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10153// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10154#[gpui::test]
10155#[cfg_attr(target_os = "windows", ignore)]
10156async fn test_file_status(cx: &mut gpui::TestAppContext) {
10157 init_test(cx);
10158 cx.executor().allow_parking();
10159 const IGNORE_RULE: &str = "**/target";
10160
10161 let root = TempTree::new(json!({
10162 "project": {
10163 "a.txt": "a",
10164 "b.txt": "bb",
10165 "c": {
10166 "d": {
10167 "e.txt": "eee"
10168 }
10169 },
10170 "f.txt": "ffff",
10171 "target": {
10172 "build_file": "???"
10173 },
10174 ".gitignore": IGNORE_RULE
10175 },
10176
10177 }));
10178 let root_path = root.path();
10179
10180 const A_TXT: &str = "a.txt";
10181 const B_TXT: &str = "b.txt";
10182 const E_TXT: &str = "c/d/e.txt";
10183 const F_TXT: &str = "f.txt";
10184 const DOTGITIGNORE: &str = ".gitignore";
10185 const BUILD_FILE: &str = "target/build_file";
10186
10187 // Set up git repository before creating the worktree.
10188 let work_dir = root.path().join("project");
10189 let mut repo = git_init(work_dir.as_path());
10190 repo.add_ignore_rule(IGNORE_RULE).unwrap();
10191 git_add(A_TXT, &repo);
10192 git_add(E_TXT, &repo);
10193 git_add(DOTGITIGNORE, &repo);
10194 git_commit("Initial commit", &repo);
10195
10196 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10197
10198 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10199 tree.flush_fs_events(cx).await;
10200 project
10201 .update(cx, |project, cx| project.git_scans_complete(cx))
10202 .await;
10203 cx.executor().run_until_parked();
10204
10205 let repository = project.read_with(cx, |project, cx| {
10206 project.repositories(cx).values().next().unwrap().clone()
10207 });
10208
10209 // Check that the right git state is observed on startup
10210 repository.read_with(cx, |repository, _cx| {
10211 assert_eq!(
10212 repository.work_directory_abs_path.as_ref(),
10213 root_path.join("project").as_path()
10214 );
10215
10216 assert_eq!(
10217 repository
10218 .status_for_path(&repo_path(B_TXT))
10219 .unwrap()
10220 .status,
10221 FileStatus::Untracked,
10222 );
10223 assert_eq!(
10224 repository
10225 .status_for_path(&repo_path(F_TXT))
10226 .unwrap()
10227 .status,
10228 FileStatus::Untracked,
10229 );
10230 });
10231
10232 // Modify a file in the working copy.
10233 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
10234 tree.flush_fs_events(cx).await;
10235 project
10236 .update(cx, |project, cx| project.git_scans_complete(cx))
10237 .await;
10238 cx.executor().run_until_parked();
10239
10240 // The worktree detects that the file's git status has changed.
10241 repository.read_with(cx, |repository, _| {
10242 assert_eq!(
10243 repository
10244 .status_for_path(&repo_path(A_TXT))
10245 .unwrap()
10246 .status,
10247 StatusCode::Modified.worktree(),
10248 );
10249 });
10250
10251 // Create a commit in the git repository.
10252 git_add(A_TXT, &repo);
10253 git_add(B_TXT, &repo);
10254 git_commit("Committing modified and added", &repo);
10255 tree.flush_fs_events(cx).await;
10256 project
10257 .update(cx, |project, cx| project.git_scans_complete(cx))
10258 .await;
10259 cx.executor().run_until_parked();
10260
10261 // The worktree detects that the files' git status have changed.
10262 repository.read_with(cx, |repository, _cx| {
10263 assert_eq!(
10264 repository
10265 .status_for_path(&repo_path(F_TXT))
10266 .unwrap()
10267 .status,
10268 FileStatus::Untracked,
10269 );
10270 assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
10271 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10272 });
10273
10274 // Modify files in the working copy and perform git operations on other files.
10275 git_reset(0, &repo);
10276 git_remove_index(Path::new(B_TXT), &repo);
10277 git_stash(&mut repo);
10278 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
10279 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
10280 tree.flush_fs_events(cx).await;
10281 project
10282 .update(cx, |project, cx| project.git_scans_complete(cx))
10283 .await;
10284 cx.executor().run_until_parked();
10285
10286 // Check that more complex repo changes are tracked
10287 repository.read_with(cx, |repository, _cx| {
10288 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10289 assert_eq!(
10290 repository
10291 .status_for_path(&repo_path(B_TXT))
10292 .unwrap()
10293 .status,
10294 FileStatus::Untracked,
10295 );
10296 assert_eq!(
10297 repository
10298 .status_for_path(&repo_path(E_TXT))
10299 .unwrap()
10300 .status,
10301 StatusCode::Modified.worktree(),
10302 );
10303 });
10304
10305 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
10306 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
10307 std::fs::write(
10308 work_dir.join(DOTGITIGNORE),
10309 [IGNORE_RULE, "f.txt"].join("\n"),
10310 )
10311 .unwrap();
10312
10313 git_add(Path::new(DOTGITIGNORE), &repo);
10314 git_commit("Committing modified git ignore", &repo);
10315
10316 tree.flush_fs_events(cx).await;
10317 cx.executor().run_until_parked();
10318
10319 let mut renamed_dir_name = "first_directory/second_directory";
10320 const RENAMED_FILE: &str = "rf.txt";
10321
10322 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
10323 std::fs::write(
10324 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
10325 "new-contents",
10326 )
10327 .unwrap();
10328
10329 tree.flush_fs_events(cx).await;
10330 project
10331 .update(cx, |project, cx| project.git_scans_complete(cx))
10332 .await;
10333 cx.executor().run_until_parked();
10334
10335 repository.read_with(cx, |repository, _cx| {
10336 assert_eq!(
10337 repository
10338 .status_for_path(&RepoPath::from_rel_path(
10339 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10340 ))
10341 .unwrap()
10342 .status,
10343 FileStatus::Untracked,
10344 );
10345 });
10346
10347 renamed_dir_name = "new_first_directory/second_directory";
10348
10349 std::fs::rename(
10350 work_dir.join("first_directory"),
10351 work_dir.join("new_first_directory"),
10352 )
10353 .unwrap();
10354
10355 tree.flush_fs_events(cx).await;
10356 project
10357 .update(cx, |project, cx| project.git_scans_complete(cx))
10358 .await;
10359 cx.executor().run_until_parked();
10360
10361 repository.read_with(cx, |repository, _cx| {
10362 assert_eq!(
10363 repository
10364 .status_for_path(&RepoPath::from_rel_path(
10365 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10366 ))
10367 .unwrap()
10368 .status,
10369 FileStatus::Untracked,
10370 );
10371 });
10372}
10373
// Exercises (on the real filesystem) that churn inside a gitignored directory
// does not trigger repository status rescans, and that worktree entry events
// are only emitted for the ignored dir's direct, already-tracked children.
// NOTE(review): #[ignore]d — appears sensitive to real FS event delivery timing.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS I/O below, so the deterministic test executor must allow blocking.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Collect every repository update and worktree entry change for the assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // Test-harness sentinel files are noise, not project churn.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Load a file inside the ignored dir so its ancestors appear as worktree entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate build-tool (flycheck-style) churn inside the ignored dir:
    // create a nested dir, write a temp file into it, then delete it all again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // After the churn, the snapshot must be back to its initial shape.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10532
// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
// to different timings/ordering of events.
// Emulates a flycheck-style FS event aimed at an ignored directory (FakeFs
// variant of the test above) and asserts that neither repository updates nor
// worktree entry events are produced for it.
#[ignore]
#[gpui::test]
async fn test_odd_events_for_ignored_dirs(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "**/target/",
            "src": {
                "main.rs": "fn main() {}",
            },
            "target": {
                "debug": {
                    "foo.txt": "foo",
                    "deps": {}
                }
            }
        }),
    )
    .await;
    // Commit and stage everything that isn't ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "**/target/".into()),
            ("src/main.rs", "fn main() {}".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    // Collect repository updates and worktree entry changes for the assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repository_updates = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repository_updates.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // Test-harness sentinel files are noise, not project churn.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Load a file inside the ignored dir so its ancestors appear as worktree entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("target/debug/foo.txt"), cx)
    })
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("src"), false),
                (rel_path("src/main.rs"), false),
                (rel_path("target"), true),
                (rel_path("target/debug"), true),
                (rel_path("target/debug/deps"), true),
                (rel_path("target/debug/foo.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::BranchChanged,
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::StatusesChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("target".to_string(), PathChange::Loaded),
            ("target/debug".to_string(), PathChange::Loaded),
            ("target/debug/deps".to_string(), PathChange::Loaded),
            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
        ],
        "All non-ignored entries and all opened firs should be getting a project event",
    );

    // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
    // This may happen multiple times during a single flycheck, but once is enough for testing.
    fs.emit_fs_event("/root/target/debug/deps", None);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        Vec::new(),
        "No further project events should happen, as only ignored dirs received FS events",
    );
}
10666
// Only repositories reachable from *visible* worktrees should be registered:
// the outer repo at `dir1` must not appear in `project.repositories`, and
// adding an invisible single-file worktree for `b.txt` afterwards must not
// change that.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    // `dir1` is itself a repo and contains a nested repo at `dir1/dep1`.
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repo's directory as the (visible) worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `b.txt` (inside the outer repo) as a non-visible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Still only the repo of the visible worktree; the outer repo stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10728
// Checks per-entry git status and ignore flags across rescans: files created
// after the initial scan get the right status (Added once staged), files
// matched by the parent directory's `.gitignore` stay status-less, and
// ignored entries — including the `.git` dir itself — carry `is_ignored`.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored paths still get scanned entries.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's children into the snapshot; ignored dirs are not
    // fully populated by the initial scan.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: no statuses; only the file under `ignored-dir` is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files, staging `tracked-file2` in the index so it reads Added.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself must always be flagged as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10869
// Linked git worktrees (a `.git` *file* pointing into `<repo>/.git/worktrees/`)
// and submodules (a `.git` file pointing into `<repo>/.git/modules/`) must
// each be detected as their own repository, and each must refresh its statuses
// when its underlying git state changes.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // NOTE(review): awaiting the barrier appears to flush the repo's pending
    // work before asserting statuses — confirm against Repository::barrier.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11025
11026#[gpui::test]
11027async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
11028 init_test(cx);
11029 let fs = FakeFs::new(cx.background_executor.clone());
11030 fs.insert_tree(
11031 path!("/root"),
11032 json!({
11033 "project": {
11034 ".git": {},
11035 "child1": {
11036 "a.txt": "A",
11037 },
11038 "child2": {
11039 "b.txt": "B",
11040 }
11041 }
11042 }),
11043 )
11044 .await;
11045
11046 let project = Project::test(
11047 fs.clone(),
11048 [
11049 path!("/root/project/child1").as_ref(),
11050 path!("/root/project/child2").as_ref(),
11051 ],
11052 cx,
11053 )
11054 .await;
11055
11056 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11057 tree.flush_fs_events(cx).await;
11058 project
11059 .update(cx, |project, cx| project.git_scans_complete(cx))
11060 .await;
11061 cx.executor().run_until_parked();
11062
11063 let repos = project.read_with(cx, |project, cx| {
11064 project
11065 .repositories(cx)
11066 .values()
11067 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11068 .collect::<Vec<_>>()
11069 });
11070 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11071}
11072
// Saving a buffer under a new path (which emits `BufferChangedFilePath`) must
// retarget its open diffs: the unstaged diff's base text becomes the new
// file's index content, and the uncommitted diff's base its committed content.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct marker strings for each (file, git area) combination, so the
    // assertions below can tell exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the buffer's contents so the diff is non-empty after the rename.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A diff opened *after* the rename must also compare against file_2's HEAD.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11186
11187async fn search(
11188 project: &Entity<Project>,
11189 query: SearchQuery,
11190 cx: &mut gpui::TestAppContext,
11191) -> Result<HashMap<String, Vec<Range<usize>>>> {
11192 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11193 let mut results = HashMap::default();
11194 while let Ok(search_result) = search_rx.rx.recv().await {
11195 match search_result {
11196 SearchResult::Buffer { buffer, ranges } => {
11197 results.entry(buffer).or_insert(ranges);
11198 }
11199 SearchResult::LimitReached => {}
11200 }
11201 }
11202 Ok(results
11203 .into_iter()
11204 .map(|(buffer, ranges)| {
11205 buffer.update(cx, |buffer, cx| {
11206 let path = buffer
11207 .file()
11208 .unwrap()
11209 .full_path(cx)
11210 .to_string_lossy()
11211 .to_string();
11212 let ranges = ranges
11213 .into_iter()
11214 .map(|range| range.to_offset(buffer))
11215 .collect::<Vec<_>>();
11216 (path, ranges)
11217 })
11218 })
11219 .collect())
11220}
11221
// Reloading a buffer with a different encoding participates in undo history:
// undo restores the UTF-8 interpretation, redo re-applies UTF-16LE, and the
// buffer stays non-dirty throughout because content always matches disk.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: two ASCII bytes decode as UTF-8 "Hi", buffer is clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is the single code unit U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo must restore both the text and the previous encoding.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo must re-apply the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11285
11286pub fn init_test(cx: &mut gpui::TestAppContext) {
11287 zlog::init_test();
11288
11289 cx.update(|cx| {
11290 let settings_store = SettingsStore::test(cx);
11291 cx.set_global(settings_store);
11292 release_channel::init(semver::Version::new(0, 0, 0), cx);
11293 });
11294}
11295
11296fn json_lang() -> Arc<Language> {
11297 Arc::new(Language::new(
11298 LanguageConfig {
11299 name: "JSON".into(),
11300 matcher: LanguageMatcher {
11301 path_suffixes: vec!["json".to_string()],
11302 ..Default::default()
11303 },
11304 ..Default::default()
11305 },
11306 None,
11307 ))
11308}
11309
11310fn js_lang() -> Arc<Language> {
11311 Arc::new(Language::new(
11312 LanguageConfig {
11313 name: "JavaScript".into(),
11314 matcher: LanguageMatcher {
11315 path_suffixes: vec!["js".to_string()],
11316 ..Default::default()
11317 },
11318 ..Default::default()
11319 },
11320 None,
11321 ))
11322}
11323
/// Builds a fake "Python" language whose toolchain lister reports a `.venv`
/// directory found (on the given fake filesystem) in any ancestor of the
/// queried subroot. No parsing grammar is attached.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Stub lister: walks ancestors of the query path looking for `.venv`
    // directories on the captured fake filesystem.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11398
11399fn typescript_lang() -> Arc<Language> {
11400 Arc::new(Language::new(
11401 LanguageConfig {
11402 name: "TypeScript".into(),
11403 matcher: LanguageMatcher {
11404 path_suffixes: vec!["ts".to_string()],
11405 ..Default::default()
11406 },
11407 ..Default::default()
11408 },
11409 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11410 ))
11411}
11412
11413fn tsx_lang() -> Arc<Language> {
11414 Arc::new(Language::new(
11415 LanguageConfig {
11416 name: "tsx".into(),
11417 matcher: LanguageMatcher {
11418 path_suffixes: vec!["tsx".to_string()],
11419 ..Default::default()
11420 },
11421 ..Default::default()
11422 },
11423 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11424 ))
11425}
11426
11427fn get_all_tasks(
11428 project: &Entity<Project>,
11429 task_contexts: Arc<TaskContexts>,
11430 cx: &mut App,
11431) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11432 let new_tasks = project.update(cx, |project, cx| {
11433 project.task_store().update(cx, |task_store, cx| {
11434 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11435 this.used_and_current_resolved_tasks(task_contexts, cx)
11436 })
11437 })
11438 });
11439
11440 cx.background_spawn(async move {
11441 let (mut old, new) = new_tasks.await;
11442 old.extend(new);
11443 old
11444 })
11445}
11446
11447#[track_caller]
11448fn assert_entry_git_state(
11449 tree: &Worktree,
11450 repository: &Repository,
11451 path: &str,
11452 index_status: Option<StatusCode>,
11453 is_ignored: bool,
11454) {
11455 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11456 let entry = tree
11457 .entry_for_path(&rel_path(path))
11458 .unwrap_or_else(|| panic!("entry {path} not found"));
11459 let status = repository
11460 .status_for_path(&repo_path(path))
11461 .map(|entry| entry.status);
11462 let expected = index_status.map(|index_status| {
11463 TrackedStatus {
11464 index_status,
11465 worktree_status: StatusCode::Unmodified,
11466 }
11467 .into()
11468 });
11469 assert_eq!(
11470 status, expected,
11471 "expected {path} to have git status: {expected:?}"
11472 );
11473 assert_eq!(
11474 entry.is_ignored, is_ignored,
11475 "expected {path} to have is_ignored: {is_ignored}"
11476 );
11477}
11478
11479#[track_caller]
11480fn git_init(path: &Path) -> git2::Repository {
11481 let mut init_opts = RepositoryInitOptions::new();
11482 init_opts.initial_head("main");
11483 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11484}
11485
11486#[track_caller]
11487fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11488 let path = path.as_ref();
11489 let mut index = repo.index().expect("Failed to get index");
11490 index.add_path(path).expect("Failed to add file");
11491 index.write().expect("Failed to write index");
11492}
11493
11494#[track_caller]
11495fn git_remove_index(path: &Path, repo: &git2::Repository) {
11496 let mut index = repo.index().expect("Failed to get index");
11497 index.remove_path(path).expect("Failed to add file");
11498 index.write().expect("Failed to write index");
11499}
11500
11501#[track_caller]
11502fn git_commit(msg: &'static str, repo: &git2::Repository) {
11503 use git2::Signature;
11504
11505 let signature = Signature::now("test", "test@zed.dev").unwrap();
11506 let oid = repo.index().unwrap().write_tree().unwrap();
11507 let tree = repo.find_tree(oid).unwrap();
11508 if let Ok(head) = repo.head() {
11509 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11510
11511 let parent_commit = parent_obj.as_commit().unwrap();
11512
11513 repo.commit(
11514 Some("HEAD"),
11515 &signature,
11516 &signature,
11517 msg,
11518 &tree,
11519 &[parent_commit],
11520 )
11521 .expect("Failed to commit with parent");
11522 } else {
11523 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11524 .expect("Failed to commit");
11525 }
11526}
11527
/// Cherry-picks `commit` onto the current HEAD.
/// (`#[cfg(any())]` is always false, so this helper is compiled out;
/// it is kept around for ad-hoc use in future tests.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11533
11534#[track_caller]
11535fn git_stash(repo: &mut git2::Repository) {
11536 use git2::Signature;
11537
11538 let signature = Signature::now("test", "test@zed.dev").unwrap();
11539 repo.stash_save(&signature, "N/A", None)
11540 .expect("Failed to stash");
11541}
11542
11543#[track_caller]
11544fn git_reset(offset: usize, repo: &git2::Repository) {
11545 let head = repo.head().expect("Couldn't get repo head");
11546 let object = head.peel(git2::ObjectType::Commit).unwrap();
11547 let commit = object.as_commit().unwrap();
11548 let new_head = commit
11549 .parents()
11550 .inspect(|parnet| {
11551 parnet.message();
11552 })
11553 .nth(offset)
11554 .expect("Not enough history");
11555 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11556 .expect("Could not reset");
11557}
11558
/// Creates branch `name` pointing at the current HEAD commit.
/// (`#[cfg(any())]` is always false, so this helper is compiled out;
/// it is kept around for ad-hoc use in future tests.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11569
/// Points HEAD at `name` (a refname) and checks out its contents.
/// (`#[cfg(any())]` is always false, so this helper is compiled out;
/// it is kept around for ad-hoc use in future tests.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11576
/// Collects the repository's status entries as a map from path to status.
/// (`#[cfg(any())]` is always false, so this helper is compiled out;
/// it is kept around for ad-hoc use in future tests.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| {
            let path = entry.path().unwrap().to_string();
            (path, entry.status())
        })
        .collect()
}
11586
/// Verifies `Project::find_project_path` with absolute paths across two
/// worktrees: existing files resolve to the right worktree and relative path,
/// nonexistent files inside a worktree still resolve, and paths outside every
/// worktree resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path under a worktree resolves even if no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11670
/// Verifies repository bookkeeping as worktrees are removed: removing a
/// worktree nested inside a repo keeps that repo alive (still covered by
/// another worktree), while removing a repo's last covering worktree drops it
/// and moves the active repository to a remaining one (or `None` at the end).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: repo roots `a` and `b`, plus `b/script` nested inside
    // repo `b`.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories exist (`a` and `b`); `b/script` shares repo `b`.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `b/script` worktree must not drop repo `b`, since
    // the `/root/b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree `a` should make `b` the active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11783
/// Verifies optimistic hunk staging: while a stage operation is in flight the
/// hunk's secondary status shows `SecondaryHunkRemovalPending`, it settles to
/// `NoSecondaryHunk` once staging completes, and the hunk disappears entirely
/// after HEAD is updated to match the file (simulating a commit).
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    // The working copy differs from HEAD on line 2 only ("two" -> "TWO").
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Both HEAD and the index hold the original contents, so the edit is
    // present but unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight, the hunk reports its staged state optimistically.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11928
/// Verifies that the `read_only_files` glob patterns in the worktree settings
/// mark matching buffers read-only while leaving other buffers writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12004
/// Verifies that an explicitly empty `read_only_files` list leaves every
/// buffer writable, including files that would match common generated-file
/// patterns.
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12063
/// Verifies `read_only_files` patterns targeting lock files: `*.lock` and
/// `package-lock.json` buffers become read-only while their editable
/// counterparts (`Cargo.toml`, `package.json`) stay writable.
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12142
/// Tests for the `disable_ai` setting's "saturating" merge semantics:
/// once any settings layer (global, user, or project-local) sets
/// `disable_ai: true`, a `false` from another layer cannot re-enable AI.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Global layer says true, user layer says false: AI stays disabled.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Swapped layers (global false, user true): AI is still disabled.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Fabricate a worktree id and project path to address local settings.
        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}