1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    // Build a real on-disk tree (not a FakeFs) because symlinks are created
    // through `std::os::unix::fs` and the project is opened with `RealFs`.
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink to the worktree root itself ("root_link"), and one
    // directory symlink inside the tree ("finnochio" -> "fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project *through* the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // The symlinked directory's contents are scanned like regular entries.
        assert_eq!(tree.file_count(), 5);
        // Both paths resolve to the same underlying file on disk (same inode).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
179
// Tests merging of .editorconfig values with .zed/settings.json: editorconfig
// wins where it sets a value, nested configs override ancestors, and
// `max_line_length = off` falls back to the Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig (root = true), a .zed/settings.json with
    // deliberately conflicting values, and nested .editorconfig files in "b"
    // and "d" that partially override the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp directory into a FakeFs and open it as a project.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the fully-merged language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
// Tests that .editorconfig files in directories *above* the worktree root
// (external configs) participate in settings resolution for worktree files.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three config layers: grandparent matches [*], parent matches [*.rs],
    // and the worktree's own config matches [*.md]. The worktree root is
    // /grandparent/parent/worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves merged language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
// Tests that a worktree-internal .editorconfig with `root = true` stops the
// search before any *external* (ancestor-directory) config is applied.
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The parent directory's config would set indent_size = 99, but the
    // worktree's own config declares `root = true`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
445
// Tests that `root = true` in an *external* config (the parent directory)
// stops traversal before reaching configs even further up (the grandparent).
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree itself has no .editorconfig; the parent's config is a root
    // config, so the grandparent's indent_size = 99 must never apply.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
493
// Tests that two worktrees under the same parent directory both pick up the
// parent's shared external .editorconfig.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both worktrees contain an internal .editorconfig (required for external
    // discovery — see test_external_editorconfig_not_loaded_without_internal_config)
    // but neither sets indent_size; only the shared parent config does.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
555
556#[gpui::test]
557async fn test_external_editorconfig_not_loaded_without_internal_config(
558 cx: &mut gpui::TestAppContext,
559) {
560 init_test(cx);
561
562 let fs = FakeFs::new(cx.executor());
563 fs.insert_tree(
564 path!("/parent"),
565 json!({
566 ".editorconfig": "[*]\nindent_size = 99\n",
567 "worktree": {
568 "file.rs": "fn main() {}",
569 }
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
575
576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
577 language_registry.add(rust_lang());
578
579 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
580
581 cx.executor().run_until_parked();
582
583 cx.update(|cx| {
584 let tree = worktree.read(cx);
585 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
586 let file = File::for_entry(file_entry, worktree.clone());
587 let file_language = project
588 .read(cx)
589 .languages()
590 .load_language_for_file_path(file.path.as_std_path());
591 let file_language = cx
592 .foreground_executor()
593 .block_on(file_language)
594 .expect("Failed to get file language");
595 let file = file as _;
596 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
597
598 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
599 // because without an internal .editorconfig, external configs are not loaded
600 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
601 });
602}
603
// Tests that editing an *external* .editorconfig on disk is observed (via a
// file watcher, presumably — the mechanism lives outside this test) and the
// merged settings are refreshed.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Internal config is empty ("[*]\n") but must exist so the external
    // parent config is discovered at all.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    // First read: settings as initially loaded from disk.
    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk with a different indent_size.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    // Second read: the change must have propagated into the settings.
    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
// Tests that a worktree added to an already-open project also discovers the
// external .editorconfig shared with the pre-existing worktree.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both directories carry an empty internal config so external discovery
    // kicks in; the shared parent config sets indent_size = 7.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Open the project with only the first worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the second directory as a new worktree of the same project.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
// Tests that removing a worktree drops its external-editorconfig bookkeeping
// (per-worktree state, cached external configs, and file watchers) from the
// global SettingsStore.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internals via its test-only accessor.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
810
// Tests reference-counted cleanup: when two worktrees share one external
// .editorconfig, removing one worktree must NOT drop the shared config or its
// watcher while the other worktree still uses it.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // NOTE(review): assumes `worktrees(cx)` yields the worktrees in the order
    // they were added (a first, then b) — confirm against the iterator's
    // contract if this ever flakes.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    // Remove one of the two sharers.
    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
// Tests that the "git_hosting_providers" entry in a project's
// .zed/settings.json registers a custom provider in the global registry, and
// that clearing the setting unregisters it again.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // The provider registry is a global; initialize it plus the built-in
    // hosting-provider integrations before opening the project.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one custom GitLab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, "foo" must be present in the registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // "foo" must be unregistered once the setting is gone.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
1175#[gpui::test]
1176async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
1177 init_test(cx);
1178 TaskStore::init(None);
1179
1180 // We need to start with a valid `.zed/tasks.json` file as otherwise the
1181 // event is emitted before we havd a chance to setup the event subscription.
1182 let fs = FakeFs::new(cx.executor());
1183 fs.insert_tree(
1184 path!("/dir"),
1185 json!({
1186 ".zed": {
1187 "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
1188 },
1189 "file.rs": ""
1190 }),
1191 )
1192 .await;
1193
1194 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1195 let saw_toast = Rc::new(RefCell::new(false));
1196
1197 // Update the `.zed/tasks.json` file with an invalid variable, so we can
1198 // later assert that the `Event::Toast` even is emitted.
1199 fs.save(
1200 path!("/dir/.zed/tasks.json").as_ref(),
1201 &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
1202 Default::default(),
1203 )
1204 .await
1205 .unwrap();
1206
1207 project.update(cx, |_, cx| {
1208 let saw_toast = saw_toast.clone();
1209
1210 cx.subscribe(&project, move |_, _, event: &Event, _| match event {
1211 Event::Toast {
1212 notification_id,
1213 message,
1214 link: Some(ToastLink { url, .. }),
1215 } => {
1216 assert!(notification_id.starts_with("local-tasks-"));
1217 assert!(message.contains("ZED_FOO"));
1218 assert_eq!(*url, "https://zed.dev/docs/tasks");
1219 *saw_toast.borrow_mut() = true;
1220 }
1221 _ => {}
1222 })
1223 .detach();
1224 });
1225
1226 cx.run_until_parked();
1227 assert!(
1228 *saw_toast.borrow(),
1229 "Expected `Event::Toast` was never emitted"
1230 );
1231}
1232
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A worktree-local task that references `$ZED_WORKTREE_ROOT` can only be
    // resolved when some context supplies that variable. This test checks
    // both sides: without a worktree context the task is filtered out; with a
    // worktree context providing `WorktreeRoot`, the task resolves and the
    // variable is substituted into the command.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree finish scanning before querying its id.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Case 1: an active item tied to the worktree, but no worktree context —
    // `$ZED_WORKTREE_ROOT` has no value, so the task cannot be resolved.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Case 2: same query, but the worktree context now carries a
    // `WorktreeRoot` variable, so the task resolves with it substituted.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    // The resolved command has `$ZED_WORKTREE_ROOT` expanded to "/dir".
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1324
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Resolves a buffer's project root by walking up the ancestor chain
    // looking for a `pyproject.toml` file, so that `project-a` and
    // `project-b` below are treated as separate rooting points.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk at most `depth` ancestors; the first one containing a
            // `pyproject.toml` is the manifest root.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling Python subprojects in one worktree, each with its own
    // `pyproject.toml` and `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no toolchain has been selected yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Toolchain discovery is rooted at project-b thanks to the manifest
    // provider registered above.
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active until one is explicitly selected.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a distinct toolchain for project-b, its buffer should
    // be served by a separate server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end exercise of language-server lifecycle management:
    // - servers start lazily when a buffer of their language opens,
    // - open/change/save/close notifications are routed per-language,
    // - renames across extensions re-home the buffer to the other server,
    // - restarting servers reopens all of their documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising completion triggers and save support, so
    // we can verify buffers pick up capabilities and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a distinct trigger character (":") so the two
    // servers' capabilities are distinguishable in assertions below.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The next DidChange seen by the Rust server is for test2.rs — the TOML
    // edit above was not forwarded to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is a close of the old URI followed by an open
    // of the new one, both on the Rust server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify below that it is cleared when the
    // buffer switches languages.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before their replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    // Dropping the handle closes the buffer from the LSP's point of view.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    // Verifies how a user-configured LSP `binary.path` is resolved: one entry
    // should resolve relative to the worktree root, the other should be left
    // untouched so the OS falls back to the PATH environment variable.
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            // NOTE(review): the fixture creates `my_fake_lsp.exe`, but the
            // settings above point at `my_fake_lsp_binary.exe` — confirm
            // whether the relative-path resolution checks for this exact
            // file's existence, or only resolves the path textually.
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is anchored at the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-existent path is passed through untouched (PATH lookup).
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(
2206 lsp::RegistrationParams {
2207 registrations: vec![lsp::Registration {
2208 id: Default::default(),
2209 method: "workspace/didChangeWatchedFiles".to_string(),
2210 register_options: serde_json::to_value(
2211 lsp::DidChangeWatchedFilesRegistrationOptions {
2212 watchers: vec![
2213 lsp::FileSystemWatcher {
2214 glob_pattern: lsp::GlobPattern::String(
2215 path!("/the-root/Cargo.toml").to_string(),
2216 ),
2217 kind: None,
2218 },
2219 lsp::FileSystemWatcher {
2220 glob_pattern: lsp::GlobPattern::String(
2221 path!("/the-root/src/*.{rs,c}").to_string(),
2222 ),
2223 kind: None,
2224 },
2225 lsp::FileSystemWatcher {
2226 glob_pattern: lsp::GlobPattern::String(
2227 path!("/the-root/target/y/**/*.rs").to_string(),
2228 ),
2229 kind: None,
2230 },
2231 lsp::FileSystemWatcher {
2232 glob_pattern: lsp::GlobPattern::String(
2233 path!("/the/stdlib/src/**/*.rs").to_string(),
2234 ),
2235 kind: None,
2236 },
2237 lsp::FileSystemWatcher {
2238 glob_pattern: lsp::GlobPattern::String(
2239 path!("**/Cargo.lock").to_string(),
2240 ),
2241 kind: None,
2242 },
2243 ],
2244 },
2245 )
2246 .ok(),
2247 }],
2248 },
2249 DEFAULT_LSP_REQUEST_TIMEOUT,
2250 )
2251 .await
2252 .into_response()
2253 .unwrap();
2254 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2255 let file_changes = file_changes.clone();
2256 move |params, _| {
2257 let mut file_changes = file_changes.lock();
2258 file_changes.extend(params.changes);
2259 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2260 }
2261 });
2262
2263 cx.executor().run_until_parked();
2264 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2265 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2266
2267 let mut new_watched_paths = fs.watched_paths();
2268 new_watched_paths.retain(|path| {
2269 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2270 });
2271 assert_eq!(
2272 &new_watched_paths,
2273 &[
2274 Path::new(path!("/the-root")),
2275 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2276 Path::new(path!("/the/stdlib/src"))
2277 ]
2278 );
2279
2280 // Now the language server has asked us to watch an ignored directory path,
2281 // so we recursively load it.
2282 project.update(cx, |project, cx| {
2283 let worktree = project.visible_worktrees(cx).next().unwrap();
2284 assert_eq!(
2285 worktree
2286 .read(cx)
2287 .snapshot()
2288 .entries(true, 0)
2289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2290 .collect::<Vec<_>>(),
2291 &[
2292 ("", false),
2293 (".gitignore", false),
2294 ("Cargo.lock", false),
2295 ("src", false),
2296 ("src/a.rs", false),
2297 ("src/b.rs", false),
2298 ("target", true),
2299 ("target/x", true),
2300 ("target/y", true),
2301 ("target/y/out", true),
2302 ("target/y/out/y.rs", true),
2303 ("target/z", true),
2304 ]
2305 );
2306 });
2307
2308 // Perform some file system mutations, two of which match the watched patterns,
2309 // and one of which does not.
2310 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2317 .await
2318 .unwrap();
2319 fs.create_file(
2320 path!("/the-root/target/x/out/x2.rs").as_ref(),
2321 Default::default(),
2322 )
2323 .await
2324 .unwrap();
2325 fs.create_file(
2326 path!("/the-root/target/y/out/y2.rs").as_ref(),
2327 Default::default(),
2328 )
2329 .await
2330 .unwrap();
2331 fs.save(
2332 path!("/the-root/Cargo.lock").as_ref(),
2333 &"".into(),
2334 Default::default(),
2335 )
2336 .await
2337 .unwrap();
2338 fs.save(
2339 path!("/the-stdlib/LICENSE").as_ref(),
2340 &"".into(),
2341 Default::default(),
2342 )
2343 .await
2344 .unwrap();
2345 fs.save(
2346 path!("/the/stdlib/src/string.rs").as_ref(),
2347 &"".into(),
2348 Default::default(),
2349 )
2350 .await
2351 .unwrap();
2352
2353 // The language server receives events for the FS mutations that match its watch patterns.
2354 cx.executor().run_until_parked();
2355 assert_eq!(
2356 &*file_changes.lock(),
2357 &[
2358 lsp::FileEvent {
2359 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2360 typ: lsp::FileChangeType::CHANGED,
2361 },
2362 lsp::FileEvent {
2363 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2364 typ: lsp::FileChangeType::DELETED,
2365 },
2366 lsp::FileEvent {
2367 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2368 typ: lsp::FileChangeType::CREATED,
2369 },
2370 lsp::FileEvent {
2371 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2372 typ: lsp::FileChangeType::CREATED,
2373 },
2374 lsp::FileEvent {
2375 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2376 typ: lsp::FileChangeType::CHANGED,
2377 },
2378 ]
2379 );
2380}
2381
2382#[gpui::test]
2383async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let fs = FakeFs::new(cx.executor());
2387 fs.insert_tree(
2388 path!("/dir"),
2389 json!({
2390 "a.rs": "let a = 1;",
2391 "b.rs": "let b = 2;"
2392 }),
2393 )
2394 .await;
2395
2396 let project = Project::test(
2397 fs,
2398 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2399 cx,
2400 )
2401 .await;
2402 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2403
2404 let buffer_a = project
2405 .update(cx, |project, cx| {
2406 project.open_local_buffer(path!("/dir/a.rs"), cx)
2407 })
2408 .await
2409 .unwrap();
2410 let buffer_b = project
2411 .update(cx, |project, cx| {
2412 project.open_local_buffer(path!("/dir/b.rs"), cx)
2413 })
2414 .await
2415 .unwrap();
2416
2417 lsp_store.update(cx, |lsp_store, cx| {
2418 lsp_store
2419 .update_diagnostics(
2420 LanguageServerId(0),
2421 lsp::PublishDiagnosticsParams {
2422 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2423 version: None,
2424 diagnostics: vec![lsp::Diagnostic {
2425 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2426 severity: Some(lsp::DiagnosticSeverity::ERROR),
2427 message: "error 1".to_string(),
2428 ..Default::default()
2429 }],
2430 },
2431 None,
2432 DiagnosticSourceKind::Pushed,
2433 &[],
2434 cx,
2435 )
2436 .unwrap();
2437 lsp_store
2438 .update_diagnostics(
2439 LanguageServerId(0),
2440 lsp::PublishDiagnosticsParams {
2441 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2442 version: None,
2443 diagnostics: vec![lsp::Diagnostic {
2444 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2445 severity: Some(DiagnosticSeverity::WARNING),
2446 message: "error 2".to_string(),
2447 ..Default::default()
2448 }],
2449 },
2450 None,
2451 DiagnosticSourceKind::Pushed,
2452 &[],
2453 cx,
2454 )
2455 .unwrap();
2456 });
2457
2458 buffer_a.update(cx, |buffer, _| {
2459 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2460 assert_eq!(
2461 chunks
2462 .iter()
2463 .map(|(s, d)| (s.as_str(), *d))
2464 .collect::<Vec<_>>(),
2465 &[
2466 ("let ", None),
2467 ("a", Some(DiagnosticSeverity::ERROR)),
2468 (" = 1;", None),
2469 ]
2470 );
2471 });
2472 buffer_b.update(cx, |buffer, _| {
2473 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2474 assert_eq!(
2475 chunks
2476 .iter()
2477 .map(|(s, d)| (s.as_str(), *d))
2478 .collect::<Vec<_>>(),
2479 &[
2480 ("let ", None),
2481 ("b", Some(DiagnosticSeverity::WARNING)),
2482 (" = 2;", None),
2483 ]
2484 );
2485 });
2486}
2487
2488#[gpui::test]
2489async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2490 init_test(cx);
2491
2492 let fs = FakeFs::new(cx.executor());
2493 fs.insert_tree(
2494 path!("/root"),
2495 json!({
2496 "dir": {
2497 ".git": {
2498 "HEAD": "ref: refs/heads/main",
2499 },
2500 ".gitignore": "b.rs",
2501 "a.rs": "let a = 1;",
2502 "b.rs": "let b = 2;",
2503 },
2504 "other.rs": "let b = c;"
2505 }),
2506 )
2507 .await;
2508
2509 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2510 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2511 let (worktree, _) = project
2512 .update(cx, |project, cx| {
2513 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2514 })
2515 .await
2516 .unwrap();
2517 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2518
2519 let (worktree, _) = project
2520 .update(cx, |project, cx| {
2521 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2522 })
2523 .await
2524 .unwrap();
2525 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2526
2527 let server_id = LanguageServerId(0);
2528 lsp_store.update(cx, |lsp_store, cx| {
2529 lsp_store
2530 .update_diagnostics(
2531 server_id,
2532 lsp::PublishDiagnosticsParams {
2533 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2534 version: None,
2535 diagnostics: vec![lsp::Diagnostic {
2536 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2537 severity: Some(lsp::DiagnosticSeverity::ERROR),
2538 message: "unused variable 'b'".to_string(),
2539 ..Default::default()
2540 }],
2541 },
2542 None,
2543 DiagnosticSourceKind::Pushed,
2544 &[],
2545 cx,
2546 )
2547 .unwrap();
2548 lsp_store
2549 .update_diagnostics(
2550 server_id,
2551 lsp::PublishDiagnosticsParams {
2552 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2553 version: None,
2554 diagnostics: vec![lsp::Diagnostic {
2555 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2556 severity: Some(lsp::DiagnosticSeverity::ERROR),
2557 message: "unknown variable 'c'".to_string(),
2558 ..Default::default()
2559 }],
2560 },
2561 None,
2562 DiagnosticSourceKind::Pushed,
2563 &[],
2564 cx,
2565 )
2566 .unwrap();
2567 });
2568
2569 let main_ignored_buffer = project
2570 .update(cx, |project, cx| {
2571 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2572 })
2573 .await
2574 .unwrap();
2575 main_ignored_buffer.update(cx, |buffer, _| {
2576 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2577 assert_eq!(
2578 chunks
2579 .iter()
2580 .map(|(s, d)| (s.as_str(), *d))
2581 .collect::<Vec<_>>(),
2582 &[
2583 ("let ", None),
2584 ("b", Some(DiagnosticSeverity::ERROR)),
2585 (" = 2;", None),
2586 ],
2587 "Gigitnored buffers should still get in-buffer diagnostics",
2588 );
2589 });
2590 let other_buffer = project
2591 .update(cx, |project, cx| {
2592 project.open_buffer((other_worktree_id, rel_path("")), cx)
2593 })
2594 .await
2595 .unwrap();
2596 other_buffer.update(cx, |buffer, _| {
2597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2598 assert_eq!(
2599 chunks
2600 .iter()
2601 .map(|(s, d)| (s.as_str(), *d))
2602 .collect::<Vec<_>>(),
2603 &[
2604 ("let b = ", None),
2605 ("c", Some(DiagnosticSeverity::ERROR)),
2606 (";", None),
2607 ],
2608 "Buffers from hidden projects should still get in-buffer diagnostics"
2609 );
2610 });
2611
2612 project.update(cx, |project, cx| {
2613 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2614 assert_eq!(
2615 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2616 vec![(
2617 ProjectPath {
2618 worktree_id: main_worktree_id,
2619 path: rel_path("b.rs").into(),
2620 },
2621 server_id,
2622 DiagnosticSummary {
2623 error_count: 1,
2624 warning_count: 0,
2625 }
2626 )]
2627 );
2628 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2629 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2630 });
2631}
2632
// Verifies the project event sequence emitted while a language server runs
// disk-based diagnostics: progress with the adapter's
// `disk_based_diagnostics_progress_token` maps to
// DiskBasedDiagnosticsStarted/Finished, with DiagnosticsUpdated in between.
// Also verifies that re-publishing an unchanged (empty) diagnostic set does
// not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // This token is what marks a progress report as "disk-based
            // diagnostics" rather than arbitrary server work.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // A $/progress "begin" with the disk-based token produces the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the tokenized progress produces the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The diagnostic published above is attached to the (not-yet-open) buffer
    // once we open it.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second, identical empty publish: no further event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2768
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in the "diagnostics
// running" state: the old server's unfinished progress is discarded, and only
// the new server's progress drives the Started/Finished events.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, and its replacement gets a new id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2870
2871#[gpui::test]
2872async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let fs = FakeFs::new(cx.executor());
2876 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2877
2878 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2879
2880 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2881 language_registry.add(rust_lang());
2882 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2883
2884 let (buffer, _) = project
2885 .update(cx, |project, cx| {
2886 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2887 })
2888 .await
2889 .unwrap();
2890
2891 // Publish diagnostics
2892 let fake_server = fake_servers.next().await.unwrap();
2893 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2894 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2895 version: None,
2896 diagnostics: vec![lsp::Diagnostic {
2897 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2898 severity: Some(lsp::DiagnosticSeverity::ERROR),
2899 message: "the message".to_string(),
2900 ..Default::default()
2901 }],
2902 });
2903
2904 cx.executor().run_until_parked();
2905 buffer.update(cx, |buffer, _| {
2906 assert_eq!(
2907 buffer
2908 .snapshot()
2909 .diagnostics_in_range::<_, usize>(0..1, false)
2910 .map(|entry| entry.diagnostic.message.clone())
2911 .collect::<Vec<_>>(),
2912 ["the message".to_string()]
2913 );
2914 });
2915 project.update(cx, |project, cx| {
2916 assert_eq!(
2917 project.diagnostic_summary(false, cx),
2918 DiagnosticSummary {
2919 error_count: 1,
2920 warning_count: 0,
2921 }
2922 );
2923 });
2924
2925 project.update(cx, |project, cx| {
2926 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2927 });
2928
2929 // The diagnostics are cleared.
2930 cx.executor().run_until_parked();
2931 buffer.update(cx, |buffer, _| {
2932 assert_eq!(
2933 buffer
2934 .snapshot()
2935 .diagnostics_in_range::<_, usize>(0..1, false)
2936 .map(|entry| entry.diagnostic.message.clone())
2937 .collect::<Vec<_>>(),
2938 Vec::<String>::new(),
2939 );
2940 });
2941 project.update(cx, |project, cx| {
2942 assert_eq!(
2943 project.diagnostic_summary(false, cx),
2944 DiagnosticSummary {
2945 error_count: 0,
2946 warning_count: 0,
2947 }
2948 );
2949 });
2950}
2951
2952#[gpui::test]
2953async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2954 init_test(cx);
2955
2956 let fs = FakeFs::new(cx.executor());
2957 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2958
2959 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2960 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2961
2962 language_registry.add(rust_lang());
2963 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2964
2965 let (buffer, _handle) = project
2966 .update(cx, |project, cx| {
2967 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2968 })
2969 .await
2970 .unwrap();
2971
2972 // Before restarting the server, report diagnostics with an unknown buffer version.
2973 let fake_server = fake_servers.next().await.unwrap();
2974 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2975 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2976 version: Some(10000),
2977 diagnostics: Vec::new(),
2978 });
2979 cx.executor().run_until_parked();
2980 project.update(cx, |project, cx| {
2981 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2982 });
2983
2984 let mut fake_server = fake_servers.next().await.unwrap();
2985 let notification = fake_server
2986 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2987 .await
2988 .text_document;
2989 assert_eq!(notification.version, 0);
2990}
2991
// Verifies that cancelling language-server work for a buffer sends a
// window/workDoneProgress/cancel notification only for progress that the
// server declared `cancellable: true`.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress: NOT cancellable — must not receive a cancel notification.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second progress: cancellable — this is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly one cancel notification arrives, carrying the cancellable token.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3063
// Verifies that toggling `enable_language_server` per language in the user
// settings starts and stops only the matching server: disabling Rust exits
// the Rust server without touching the JavaScript one, and re-enabling Rust
// starts a fresh instance that re-opens the Rust buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a new instance and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3181
3182#[gpui::test(iterations = 3)]
3183async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3184 init_test(cx);
3185
3186 let text = "
3187 fn a() { A }
3188 fn b() { BB }
3189 fn c() { CCC }
3190 "
3191 .unindent();
3192
3193 let fs = FakeFs::new(cx.executor());
3194 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3195
3196 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3197 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3198
3199 language_registry.add(rust_lang());
3200 let mut fake_servers = language_registry.register_fake_lsp(
3201 "Rust",
3202 FakeLspAdapter {
3203 disk_based_diagnostics_sources: vec!["disk".into()],
3204 ..Default::default()
3205 },
3206 );
3207
3208 let buffer = project
3209 .update(cx, |project, cx| {
3210 project.open_local_buffer(path!("/dir/a.rs"), cx)
3211 })
3212 .await
3213 .unwrap();
3214
3215 let _handle = project.update(cx, |project, cx| {
3216 project.register_buffer_with_language_servers(&buffer, cx)
3217 });
3218
3219 let mut fake_server = fake_servers.next().await.unwrap();
3220 let open_notification = fake_server
3221 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3222 .await;
3223
3224 // Edit the buffer, moving the content down
3225 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3226 let change_notification_1 = fake_server
3227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3228 .await;
3229 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3230
3231 // Report some diagnostics for the initial version of the buffer
3232 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3233 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3234 version: Some(open_notification.text_document.version),
3235 diagnostics: vec![
3236 lsp::Diagnostic {
3237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3238 severity: Some(DiagnosticSeverity::ERROR),
3239 message: "undefined variable 'A'".to_string(),
3240 source: Some("disk".to_string()),
3241 ..Default::default()
3242 },
3243 lsp::Diagnostic {
3244 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3245 severity: Some(DiagnosticSeverity::ERROR),
3246 message: "undefined variable 'BB'".to_string(),
3247 source: Some("disk".to_string()),
3248 ..Default::default()
3249 },
3250 lsp::Diagnostic {
3251 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3252 severity: Some(DiagnosticSeverity::ERROR),
3253 source: Some("disk".to_string()),
3254 message: "undefined variable 'CCC'".to_string(),
3255 ..Default::default()
3256 },
3257 ],
3258 });
3259
3260 // The diagnostics have moved down since they were created.
3261 cx.executor().run_until_parked();
3262 buffer.update(cx, |buffer, _| {
3263 assert_eq!(
3264 buffer
3265 .snapshot()
3266 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3267 .collect::<Vec<_>>(),
3268 &[
3269 DiagnosticEntry {
3270 range: Point::new(3, 9)..Point::new(3, 11),
3271 diagnostic: Diagnostic {
3272 source: Some("disk".into()),
3273 severity: DiagnosticSeverity::ERROR,
3274 message: "undefined variable 'BB'".to_string(),
3275 is_disk_based: true,
3276 group_id: 1,
3277 is_primary: true,
3278 source_kind: DiagnosticSourceKind::Pushed,
3279 ..Diagnostic::default()
3280 },
3281 },
3282 DiagnosticEntry {
3283 range: Point::new(4, 9)..Point::new(4, 12),
3284 diagnostic: Diagnostic {
3285 source: Some("disk".into()),
3286 severity: DiagnosticSeverity::ERROR,
3287 message: "undefined variable 'CCC'".to_string(),
3288 is_disk_based: true,
3289 group_id: 2,
3290 is_primary: true,
3291 source_kind: DiagnosticSourceKind::Pushed,
3292 ..Diagnostic::default()
3293 }
3294 }
3295 ]
3296 );
3297 assert_eq!(
3298 chunks_with_diagnostics(buffer, 0..buffer.len()),
3299 [
3300 ("\n\nfn a() { ".to_string(), None),
3301 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3302 (" }\nfn b() { ".to_string(), None),
3303 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3304 (" }\nfn c() { ".to_string(), None),
3305 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3306 (" }\n".to_string(), None),
3307 ]
3308 );
3309 assert_eq!(
3310 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3311 [
3312 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3313 (" }\nfn c() { ".to_string(), None),
3314 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3315 ]
3316 );
3317 });
3318
3319 // Ensure overlapping diagnostics are highlighted correctly.
3320 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3321 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3322 version: Some(open_notification.text_document.version),
3323 diagnostics: vec![
3324 lsp::Diagnostic {
3325 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3326 severity: Some(DiagnosticSeverity::ERROR),
3327 message: "undefined variable 'A'".to_string(),
3328 source: Some("disk".to_string()),
3329 ..Default::default()
3330 },
3331 lsp::Diagnostic {
3332 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3333 severity: Some(DiagnosticSeverity::WARNING),
3334 message: "unreachable statement".to_string(),
3335 source: Some("disk".to_string()),
3336 ..Default::default()
3337 },
3338 ],
3339 });
3340
3341 cx.executor().run_until_parked();
3342 buffer.update(cx, |buffer, _| {
3343 assert_eq!(
3344 buffer
3345 .snapshot()
3346 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3347 .collect::<Vec<_>>(),
3348 &[
3349 DiagnosticEntry {
3350 range: Point::new(2, 9)..Point::new(2, 12),
3351 diagnostic: Diagnostic {
3352 source: Some("disk".into()),
3353 severity: DiagnosticSeverity::WARNING,
3354 message: "unreachable statement".to_string(),
3355 is_disk_based: true,
3356 group_id: 4,
3357 is_primary: true,
3358 source_kind: DiagnosticSourceKind::Pushed,
3359 ..Diagnostic::default()
3360 }
3361 },
3362 DiagnosticEntry {
3363 range: Point::new(2, 9)..Point::new(2, 10),
3364 diagnostic: Diagnostic {
3365 source: Some("disk".into()),
3366 severity: DiagnosticSeverity::ERROR,
3367 message: "undefined variable 'A'".to_string(),
3368 is_disk_based: true,
3369 group_id: 3,
3370 is_primary: true,
3371 source_kind: DiagnosticSourceKind::Pushed,
3372 ..Diagnostic::default()
3373 },
3374 }
3375 ]
3376 );
3377 assert_eq!(
3378 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3379 [
3380 ("fn a() { ".to_string(), None),
3381 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3383 ("\n".to_string(), None),
3384 ]
3385 );
3386 assert_eq!(
3387 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3388 [
3389 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3390 ("\n".to_string(), None),
3391 ]
3392 );
3393 });
3394
3395 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3396 // changes since the last save.
3397 buffer.update(cx, |buffer, cx| {
3398 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3399 buffer.edit(
3400 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3401 None,
3402 cx,
3403 );
3404 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3405 });
3406 let change_notification_2 = fake_server
3407 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3408 .await;
3409 assert!(
3410 change_notification_2.text_document.version > change_notification_1.text_document.version
3411 );
3412
3413 // Handle out-of-order diagnostics
3414 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3415 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3416 version: Some(change_notification_2.text_document.version),
3417 diagnostics: vec![
3418 lsp::Diagnostic {
3419 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3420 severity: Some(DiagnosticSeverity::ERROR),
3421 message: "undefined variable 'BB'".to_string(),
3422 source: Some("disk".to_string()),
3423 ..Default::default()
3424 },
3425 lsp::Diagnostic {
3426 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3427 severity: Some(DiagnosticSeverity::WARNING),
3428 message: "undefined variable 'A'".to_string(),
3429 source: Some("disk".to_string()),
3430 ..Default::default()
3431 },
3432 ],
3433 });
3434
3435 cx.executor().run_until_parked();
3436 buffer.update(cx, |buffer, _| {
3437 assert_eq!(
3438 buffer
3439 .snapshot()
3440 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3441 .collect::<Vec<_>>(),
3442 &[
3443 DiagnosticEntry {
3444 range: Point::new(2, 21)..Point::new(2, 22),
3445 diagnostic: Diagnostic {
3446 source: Some("disk".into()),
3447 severity: DiagnosticSeverity::WARNING,
3448 message: "undefined variable 'A'".to_string(),
3449 is_disk_based: true,
3450 group_id: 6,
3451 is_primary: true,
3452 source_kind: DiagnosticSourceKind::Pushed,
3453 ..Diagnostic::default()
3454 }
3455 },
3456 DiagnosticEntry {
3457 range: Point::new(3, 9)..Point::new(3, 14),
3458 diagnostic: Diagnostic {
3459 source: Some("disk".into()),
3460 severity: DiagnosticSeverity::ERROR,
3461 message: "undefined variable 'BB'".to_string(),
3462 is_disk_based: true,
3463 group_id: 5,
3464 is_primary: true,
3465 source_kind: DiagnosticSourceKind::Pushed,
3466 ..Diagnostic::default()
3467 },
3468 }
3469 ]
3470 );
3471 });
3472}
3473
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover an adjacent character so the diagnostic remains visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two zero-width diagnostics directly into the LSP store:
    // one mid-line (before the `;`) and one at the end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3551
3552#[gpui::test]
3553async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let fs = FakeFs::new(cx.executor());
3557 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3558 .await;
3559
3560 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3561 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3562
3563 lsp_store.update(cx, |lsp_store, cx| {
3564 lsp_store
3565 .update_diagnostic_entries(
3566 LanguageServerId(0),
3567 Path::new(path!("/dir/a.rs")).to_owned(),
3568 None,
3569 None,
3570 vec![DiagnosticEntry {
3571 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3572 diagnostic: Diagnostic {
3573 severity: DiagnosticSeverity::ERROR,
3574 is_primary: true,
3575 message: "syntax error a1".to_string(),
3576 source_kind: DiagnosticSourceKind::Pushed,
3577 ..Diagnostic::default()
3578 },
3579 }],
3580 cx,
3581 )
3582 .unwrap();
3583 lsp_store
3584 .update_diagnostic_entries(
3585 LanguageServerId(1),
3586 Path::new(path!("/dir/a.rs")).to_owned(),
3587 None,
3588 None,
3589 vec![DiagnosticEntry {
3590 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3591 diagnostic: Diagnostic {
3592 severity: DiagnosticSeverity::ERROR,
3593 is_primary: true,
3594 message: "syntax error b1".to_string(),
3595 source_kind: DiagnosticSourceKind::Pushed,
3596 ..Diagnostic::default()
3597 },
3598 }],
3599 cx,
3600 )
3601 .unwrap();
3602
3603 assert_eq!(
3604 lsp_store.diagnostic_summary(false, cx),
3605 DiagnosticSummary {
3606 error_count: 2,
3607 warning_count: 0,
3608 }
3609 );
3610 });
3611}
3612
// Verifies that LSP edits computed against an older document version are
// translated through the buffer edits made since that version, so they land
// on the intended content even after the user has kept typing.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret LSP edits that were computed against the *original* text.
    // Their coordinates refer to the stale version and must be remapped.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The remapped edits should apply cleanly on top of the user's edits,
    // preserving both the server's changes and the user's comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3767
// Verifies that a large, redundant LSP diff (delete-everything/reinsert style)
// is minimized into the small set of edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the genuinely-changed spans survive: the merged import and
        // the removal of the now-redundant second `use` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3878
3879#[gpui::test]
3880async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3881 cx: &mut gpui::TestAppContext,
3882) {
3883 init_test(cx);
3884
3885 let text = "Path()";
3886
3887 let fs = FakeFs::new(cx.executor());
3888 fs.insert_tree(
3889 path!("/dir"),
3890 json!({
3891 "a.rs": text
3892 }),
3893 )
3894 .await;
3895
3896 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3897 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3898 let buffer = project
3899 .update(cx, |project, cx| {
3900 project.open_local_buffer(path!("/dir/a.rs"), cx)
3901 })
3902 .await
3903 .unwrap();
3904
3905 // Simulate the language server sending us a pair of edits at the same location,
3906 // with an insertion following a replacement (which violates the LSP spec).
3907 let edits = lsp_store
3908 .update(cx, |lsp_store, cx| {
3909 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3910 &buffer,
3911 [
3912 lsp::TextEdit {
3913 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3914 new_text: "Path".into(),
3915 },
3916 lsp::TextEdit {
3917 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3918 new_text: "from path import Path\n\n\n".into(),
3919 },
3920 ],
3921 LanguageServerId(0),
3922 None,
3923 cx,
3924 )
3925 })
3926 .await
3927 .unwrap();
3928
3929 buffer.update(cx, |buffer, cx| {
3930 buffer.edit(edits, None, cx);
3931 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3932 });
3933}
3934
// Verifies that malformed LSP edits — unordered, with inverted ranges or
// positions past the end of the file — are sanitized into a valid, minimal
// edit set rather than rejected or misapplied.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed-diff case: only the spans
        // that genuinely change survive sanitization.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4041
4042fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4043 buffer: &Buffer,
4044 range: Range<T>,
4045) -> Vec<(String, Option<DiagnosticSeverity>)> {
4046 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4047 for chunk in buffer.snapshot().chunks(range, true) {
4048 if chunks
4049 .last()
4050 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4051 {
4052 chunks.last_mut().unwrap().0.push_str(chunk.text);
4053 } else {
4054 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4055 }
4056 }
4057 chunks
4058}
4059
4060#[gpui::test(iterations = 10)]
4061async fn test_definition(cx: &mut gpui::TestAppContext) {
4062 init_test(cx);
4063
4064 let fs = FakeFs::new(cx.executor());
4065 fs.insert_tree(
4066 path!("/dir"),
4067 json!({
4068 "a.rs": "const fn a() { A }",
4069 "b.rs": "const y: i32 = crate::a()",
4070 }),
4071 )
4072 .await;
4073
4074 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
4075
4076 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4077 language_registry.add(rust_lang());
4078 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
4079
4080 let (buffer, _handle) = project
4081 .update(cx, |project, cx| {
4082 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
4083 })
4084 .await
4085 .unwrap();
4086
4087 let fake_server = fake_servers.next().await.unwrap();
4088 cx.executor().run_until_parked();
4089
4090 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
4091 let params = params.text_document_position_params;
4092 assert_eq!(
4093 params.text_document.uri.to_file_path().unwrap(),
4094 Path::new(path!("/dir/b.rs")),
4095 );
4096 assert_eq!(params.position, lsp::Position::new(0, 22));
4097
4098 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
4099 lsp::Location::new(
4100 lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
4101 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4102 ),
4103 )))
4104 });
4105 let mut definitions = project
4106 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
4107 .await
4108 .unwrap()
4109 .unwrap();
4110
4111 // Assert no new language server started
4112 cx.executor().run_until_parked();
4113 assert!(fake_servers.try_next().is_err());
4114
4115 assert_eq!(definitions.len(), 1);
4116 let definition = definitions.pop().unwrap();
4117 cx.update(|cx| {
4118 let target_buffer = definition.target.buffer.read(cx);
4119 assert_eq!(
4120 target_buffer
4121 .file()
4122 .unwrap()
4123 .as_local()
4124 .unwrap()
4125 .abs_path(cx),
4126 Path::new(path!("/dir/a.rs")),
4127 );
4128 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
4129 assert_eq!(
4130 list_worktrees(&project, cx),
4131 [
4132 (path!("/dir/a.rs").as_ref(), false),
4133 (path!("/dir/b.rs").as_ref(), true)
4134 ],
4135 );
4136
4137 drop(definition);
4138 });
4139 cx.update(|cx| {
4140 assert_eq!(
4141 list_worktrees(&project, cx),
4142 [(path!("/dir/b.rs").as_ref(), true)]
4143 );
4144 });
4145
4146 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
4147 project
4148 .read(cx)
4149 .worktrees(cx)
4150 .map(|worktree| {
4151 let worktree = worktree.read(cx);
4152 (
4153 worktree.as_local().unwrap().abs_path().as_ref(),
4154 worktree.is_visible(),
4155 )
4156 })
4157 .collect::<Vec<_>>()
4158 }
4159}
4160
// Verifies that when a completion item carries an explicit `text_edit`, that
// edit's text and range win over both `insert_text` and the item label.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item offers three candidate texts; only the text_edit's should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The completion's replacement text and range come from the text_edit
    // (the trailing "fqn" span).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4244
// Verifies completion-list item defaults: when items omit `text_edit`, the
// list-level default `edit_range` supplies the replacement range, and the
// replacement text falls back from `text_edit_text` to the item label.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used for the new text; the range comes from the
        // list-level default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4382
// Verifies replacement-range inference when neither the item nor the list
// defaults supply an edit range: the range is derived from the word adjacent
// to the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text supplies the new text; the range is the "fqn" word before
    // the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        // Complete just before the closing quote, inside the string literal.
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label supplies the new text; the range is the "/cmp" word segment
    // preceding the cursor position inside the string.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4489
/// Verifies that carriage returns in LSP-provided completion text (both bare
/// `\r` and `\r\n`) are normalized to `\n` before the completion is applied,
/// so buffers keep uniform line endings.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake TypeScript server that only advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a lone `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both CR variants collapse to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4558
/// End-to-end test of a command-backed code action: the action resolves to a
/// command (no edits), executing the command makes the server send a
/// `workspace/applyEdit` back to the editor, and the resulting edits are
/// captured in the project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server supporting resolvable code actions and one command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which triggers resolve).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the top of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4702
/// Verifies that renaming a worktree entry into a not-yet-existing nested
/// directory creates the whole directory hierarchy, preserves the file's
/// contents, and that a subsequent rename into an already-existing directory
/// also succeeds.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a three-level-deep directory that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The entry id changes after the first rename; look it up again.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4810
4811#[gpui::test(iterations = 10)]
4812async fn test_save_file(cx: &mut gpui::TestAppContext) {
4813 init_test(cx);
4814
4815 let fs = FakeFs::new(cx.executor());
4816 fs.insert_tree(
4817 path!("/dir"),
4818 json!({
4819 "file1": "the old contents",
4820 }),
4821 )
4822 .await;
4823
4824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4825 let buffer = project
4826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4827 .await
4828 .unwrap();
4829 buffer.update(cx, |buffer, cx| {
4830 assert_eq!(buffer.text(), "the old contents");
4831 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4832 });
4833
4834 project
4835 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4836 .await
4837 .unwrap();
4838
4839 let new_text = fs
4840 .load(Path::new(path!("/dir/file1")))
4841 .await
4842 .unwrap()
4843 .replace("\r\n", "\n");
4844 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4845}
4846
/// Regression test for issue #24349: saving an untitled buffer to a path
/// whose language has a registered server must spawn that server and notify
/// it about the newly saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake Rust server; the stream yields a handle once it starts.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no path there is no language yet, so
    // registering it with language servers starts nothing.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving to a `.rs` path assigns the Rust language to the buffer.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the freshly started language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4926
4927#[gpui::test(iterations = 30)]
4928async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4929 init_test(cx);
4930
4931 let fs = FakeFs::new(cx.executor());
4932 fs.insert_tree(
4933 path!("/dir"),
4934 json!({
4935 "file1": "the original contents",
4936 }),
4937 )
4938 .await;
4939
4940 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4941 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4942 let buffer = project
4943 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4944 .await
4945 .unwrap();
4946
4947 // Change the buffer's file on disk, and then wait for the file change
4948 // to be detected by the worktree, so that the buffer starts reloading.
4949 fs.save(
4950 path!("/dir/file1").as_ref(),
4951 &"the first contents".into(),
4952 Default::default(),
4953 )
4954 .await
4955 .unwrap();
4956 worktree.next_event(cx).await;
4957
4958 // Change the buffer's file again. Depending on the random seed, the
4959 // previous file change may still be in progress.
4960 fs.save(
4961 path!("/dir/file1").as_ref(),
4962 &"the second contents".into(),
4963 Default::default(),
4964 )
4965 .await
4966 .unwrap();
4967 worktree.next_event(cx).await;
4968
4969 cx.executor().run_until_parked();
4970 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4971 buffer.read_with(cx, |buffer, _| {
4972 assert_eq!(buffer.text(), on_disk_text);
4973 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4974 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4975 });
4976}
4977
4978#[gpui::test(iterations = 30)]
4979async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4980 init_test(cx);
4981
4982 let fs = FakeFs::new(cx.executor());
4983 fs.insert_tree(
4984 path!("/dir"),
4985 json!({
4986 "file1": "the original contents",
4987 }),
4988 )
4989 .await;
4990
4991 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4992 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4993 let buffer = project
4994 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4995 .await
4996 .unwrap();
4997
4998 // Change the buffer's file on disk, and then wait for the file change
4999 // to be detected by the worktree, so that the buffer starts reloading.
5000 fs.save(
5001 path!("/dir/file1").as_ref(),
5002 &"the first contents".into(),
5003 Default::default(),
5004 )
5005 .await
5006 .unwrap();
5007 worktree.next_event(cx).await;
5008
5009 cx.executor()
5010 .spawn(cx.executor().simulate_random_delay())
5011 .await;
5012
5013 // Perform a noop edit, causing the buffer's version to increase.
5014 buffer.update(cx, |buffer, cx| {
5015 buffer.edit([(0..0, " ")], None, cx);
5016 buffer.undo(cx);
5017 });
5018
5019 cx.executor().run_until_parked();
5020 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5021 buffer.read_with(cx, |buffer, _| {
5022 let buffer_text = buffer.text();
5023 if buffer_text == on_disk_text {
5024 assert!(
5025 !buffer.is_dirty() && !buffer.has_conflict(),
5026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5027 );
5028 }
5029 // If the file change occurred while the buffer was processing the first
5030 // change, the buffer will be in a conflicting state.
5031 else {
5032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5034 }
5035 });
5036}
5037
5038#[gpui::test]
5039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5040 init_test(cx);
5041
5042 let fs = FakeFs::new(cx.executor());
5043 fs.insert_tree(
5044 path!("/dir"),
5045 json!({
5046 "file1": "the old contents",
5047 }),
5048 )
5049 .await;
5050
5051 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5052 let buffer = project
5053 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5054 .await
5055 .unwrap();
5056 buffer.update(cx, |buffer, cx| {
5057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5058 });
5059
5060 project
5061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5062 .await
5063 .unwrap();
5064
5065 let new_text = fs
5066 .load(Path::new(path!("/dir/file1")))
5067 .await
5068 .unwrap()
5069 .replace("\r\n", "\n");
5070 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5071}
5072
/// Verifies `save_buffer_as` on an untitled buffer: the file is written to
/// disk, the buffer becomes clean, its language is re-detected from the new
/// path's extension, and re-opening that path returns the same buffer.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // Let the post-save language re-detection run.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension now maps the buffer to Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path yields the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5126
/// Verifies `save_buffer_as` when the buffer is already backed by a file:
/// the buffer gets re-associated with the new path, while the original file
/// stays on disk with its old contents.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Change the trailing "a" to "b" so the two files' contents differ.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5193
/// Exercises filesystem rescans against a real (non-fake) filesystem:
/// renames and deletions on disk must preserve entry ids and re-point open
/// buffers at their new paths, and the resulting worktree updates must
/// replay onto a remote replica so that it converges to the same state.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching blocks; allow the test executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree produces, to replay later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new locations...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while a deleted file keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5361
/// Linux-only regression test: after a directory is deleted and then
/// re-created at the same path, the file watcher must still deliver events
/// for files created inside the new directory (i.e. the watch must be
/// re-established, not left pointing at the deleted inode).
#[cfg(target_os = "linux")]
#[gpui::test(retries = 5)]
async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS watching blocks; allow the test executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({}));
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    tree.flush_fs_events(cx).await;

    // Create the directory for the first time.
    let repro_dir = dir.path().join("repro");
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // Delete it...
    std::fs::remove_dir_all(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
    });

    // ...and re-create it at the same path.
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // A file created inside the re-created directory must be picked up.
    std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(
            tree.read(cx)
                .entry_for_path(rel_path("repro/repro-marker"))
                .is_some()
        );
    });
}
5407
/// Verifies that renaming a directory preserves the entry ids of the
/// directory and the files inside it, and that an open buffer for a
/// contained file stays clean across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the contained file keep their original ids.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5461
5462#[gpui::test]
5463async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5464 init_test(cx);
5465
5466 let fs = FakeFs::new(cx.executor());
5467 fs.insert_tree(
5468 "/dir",
5469 json!({
5470 "a.txt": "a-contents",
5471 "b.txt": "b-contents",
5472 }),
5473 )
5474 .await;
5475
5476 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5477
5478 // Spawn multiple tasks to open paths, repeating some paths.
5479 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5480 (
5481 p.open_local_buffer("/dir/a.txt", cx),
5482 p.open_local_buffer("/dir/b.txt", cx),
5483 p.open_local_buffer("/dir/a.txt", cx),
5484 )
5485 });
5486
5487 let buffer_a_1 = buffer_a_1.await.unwrap();
5488 let buffer_a_2 = buffer_a_2.await.unwrap();
5489 let buffer_b = buffer_b.await.unwrap();
5490 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5491 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5492
5493 // There is only one buffer per path.
5494 let buffer_a_id = buffer_a_1.entity_id();
5495 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5496
5497 // Open the same path again while it is still open.
5498 drop(buffer_a_1);
5499 let buffer_a_3 = project
5500 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5501 .await
5502 .unwrap();
5503
5504 // There's still only one buffer per path.
5505 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5506}
5507
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer dirty-state machine and the exact sequence of
    // `BufferEvent`s emitted at each transition:
    // - edits dirty a clean buffer (Edited + DirtyChanged),
    // - saving cleans it (Saved),
    // - reverting the text back to the saved state cleans it,
    // - a deleted-on-disk buffer is clean while it matches the last-known
    //   disk content, dirty when edited, and clean again when emptied.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events fire for every edit; they are noise here.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a successful save at the buffer's current version.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first edit after a save flips the dirty flag, so the
    // second edit emits Edited without a DirtyChanged.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    // The revert-to-saved edit above emits Edited plus a DirtyChanged back to clean.
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file from disk.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file-handle change is reported; the buffer stays dirty.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5689
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts when its backing file changes on disk:
    // - a clean buffer reloads, applying the disk changes as edits so that
    //   existing anchors land at the corresponding new offsets;
    // - a dirty buffer keeps its in-memory contents and is marked conflicted.
    init_test(cx);

    // The `ˇ` markers record offsets whose anchors we track across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at each of the marked offsets.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Markers in the new text are the positions the anchors should map to.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors survive the reload and resolve to the marked new offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5772
5773#[gpui::test]
5774async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5775 init_test(cx);
5776
5777 let fs = FakeFs::new(cx.executor());
5778 fs.insert_tree(
5779 path!("/dir"),
5780 json!({
5781 "file1": "a\nb\nc\n",
5782 "file2": "one\r\ntwo\r\nthree\r\n",
5783 }),
5784 )
5785 .await;
5786
5787 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5788 let buffer1 = project
5789 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5790 .await
5791 .unwrap();
5792 let buffer2 = project
5793 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5794 .await
5795 .unwrap();
5796
5797 buffer1.update(cx, |buffer, _| {
5798 assert_eq!(buffer.text(), "a\nb\nc\n");
5799 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5800 });
5801 buffer2.update(cx, |buffer, _| {
5802 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5803 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5804 });
5805
5806 // Change a file's line endings on disk from unix to windows. The buffer's
5807 // state updates correctly.
5808 fs.save(
5809 path!("/dir/file1").as_ref(),
5810 &"aaa\nb\nc\n".into(),
5811 LineEnding::Windows,
5812 )
5813 .await
5814 .unwrap();
5815 cx.executor().run_until_parked();
5816 buffer1.update(cx, |buffer, _| {
5817 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5818 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5819 });
5820
5821 // Save a file with windows line endings. The file is written correctly.
5822 buffer2.update(cx, |buffer, cx| {
5823 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5824 });
5825 project
5826 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5827 .await
5828 .unwrap();
5829 assert_eq!(
5830 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5831 "one\r\ntwo\r\nthree\r\nfour\r\n",
5832 );
5833}
5834
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics are grouped: a primary diagnostic
    // and the hint diagnostics that reference it via `related_information`
    // share a `group_id`, with `is_primary` marking the primary entry.
    // Also checks that `diagnostic_group` returns all members of one group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic groups, expressed the way rust-analyzer does:
    // each primary lists its hints in `related_information`, and each hint
    // lists the primary ("original diagnostic") back.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": a warning with one hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": an error with two hints on a different line.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, in position order. "error 2" and its
    // hints form group 0; "error 1" and its hint form group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Querying group 0 yields "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Querying group 1 yields "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6094
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-rename protocol flow with a language server that has
    // registered for file operations: renaming a worktree entry sends a
    // `workspace/willRenameFiles` request, applies the workspace edit the
    // server returns, and then sends a `workspace/didRenameFiles`
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the resulting future resolves only after the
    // willRenameFiles round-trip below completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the test later
    // confirms this exact edit made the round trip.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer the pending willRenameFiles request, checking its parameters.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6231
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename via LSP: `prepare_rename` issues a
    // textDocument/prepareRename request and surfaces the returned range,
    // and `perform_rename` issues textDocument/rename and applies the
    // resulting workspace edit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepareRename support so prepare_rename
                    // queries the server instead of falling back.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns edits touching both
    // one.rs (the definition) and two.rs (the references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction maps each affected buffer to its edits;
    // both buffers end up with the renamed symbol applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6372
6373#[gpui::test]
6374async fn test_search(cx: &mut gpui::TestAppContext) {
6375 init_test(cx);
6376
6377 let fs = FakeFs::new(cx.executor());
6378 fs.insert_tree(
6379 path!("/dir"),
6380 json!({
6381 "one.rs": "const ONE: usize = 1;",
6382 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6383 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6384 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6385 }),
6386 )
6387 .await;
6388 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6389 assert_eq!(
6390 search(
6391 &project,
6392 SearchQuery::text(
6393 "TWO",
6394 false,
6395 true,
6396 false,
6397 Default::default(),
6398 Default::default(),
6399 false,
6400 None
6401 )
6402 .unwrap(),
6403 cx
6404 )
6405 .await
6406 .unwrap(),
6407 HashMap::from_iter([
6408 (path!("dir/two.rs").to_string(), vec![6..9]),
6409 (path!("dir/three.rs").to_string(), vec![37..40])
6410 ])
6411 );
6412
6413 let buffer_4 = project
6414 .update(cx, |project, cx| {
6415 project.open_local_buffer(path!("/dir/four.rs"), cx)
6416 })
6417 .await
6418 .unwrap();
6419 buffer_4.update(cx, |buffer, cx| {
6420 let text = "two::TWO";
6421 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6422 });
6423
6424 assert_eq!(
6425 search(
6426 &project,
6427 SearchQuery::text(
6428 "TWO",
6429 false,
6430 true,
6431 false,
6432 Default::default(),
6433 Default::default(),
6434 false,
6435 None,
6436 )
6437 .unwrap(),
6438 cx
6439 )
6440 .await
6441 .unwrap(),
6442 HashMap::from_iter([
6443 (path!("dir/two.rs").to_string(), vec![6..9]),
6444 (path!("dir/three.rs").to_string(), vec![37..40]),
6445 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6446 ])
6447 );
6448}
6449
6450#[gpui::test]
6451async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6452 init_test(cx);
6453
6454 let search_query = "file";
6455
6456 let fs = FakeFs::new(cx.executor());
6457 fs.insert_tree(
6458 path!("/dir"),
6459 json!({
6460 "one.rs": r#"// Rust file one"#,
6461 "one.ts": r#"// TypeScript file one"#,
6462 "two.rs": r#"// Rust file two"#,
6463 "two.ts": r#"// TypeScript file two"#,
6464 }),
6465 )
6466 .await;
6467 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6468
6469 assert!(
6470 search(
6471 &project,
6472 SearchQuery::text(
6473 search_query,
6474 false,
6475 true,
6476 false,
6477 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6478 Default::default(),
6479 false,
6480 None
6481 )
6482 .unwrap(),
6483 cx
6484 )
6485 .await
6486 .unwrap()
6487 .is_empty(),
6488 "If no inclusions match, no files should be returned"
6489 );
6490
6491 assert_eq!(
6492 search(
6493 &project,
6494 SearchQuery::text(
6495 search_query,
6496 false,
6497 true,
6498 false,
6499 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6500 Default::default(),
6501 false,
6502 None
6503 )
6504 .unwrap(),
6505 cx
6506 )
6507 .await
6508 .unwrap(),
6509 HashMap::from_iter([
6510 (path!("dir/one.rs").to_string(), vec![8..12]),
6511 (path!("dir/two.rs").to_string(), vec![8..12]),
6512 ]),
6513 "Rust only search should give only Rust files"
6514 );
6515
6516 assert_eq!(
6517 search(
6518 &project,
6519 SearchQuery::text(
6520 search_query,
6521 false,
6522 true,
6523 false,
6524 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6525 .unwrap(),
6526 Default::default(),
6527 false,
6528 None,
6529 )
6530 .unwrap(),
6531 cx
6532 )
6533 .await
6534 .unwrap(),
6535 HashMap::from_iter([
6536 (path!("dir/one.ts").to_string(), vec![14..18]),
6537 (path!("dir/two.ts").to_string(), vec![14..18]),
6538 ]),
6539 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6540 );
6541
6542 assert_eq!(
6543 search(
6544 &project,
6545 SearchQuery::text(
6546 search_query,
6547 false,
6548 true,
6549 false,
6550 PathMatcher::new(
6551 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6552 PathStyle::local()
6553 )
6554 .unwrap(),
6555 Default::default(),
6556 false,
6557 None,
6558 )
6559 .unwrap(),
6560 cx
6561 )
6562 .await
6563 .unwrap(),
6564 HashMap::from_iter([
6565 (path!("dir/two.ts").to_string(), vec![14..18]),
6566 (path!("dir/one.rs").to_string(), vec![8..12]),
6567 (path!("dir/one.ts").to_string(), vec![14..18]),
6568 (path!("dir/two.rs").to_string(), vec![8..12]),
6569 ]),
6570 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6571 );
6572}
6573
6574#[gpui::test]
6575async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6576 init_test(cx);
6577
6578 let search_query = "file";
6579
6580 let fs = FakeFs::new(cx.executor());
6581 fs.insert_tree(
6582 path!("/dir"),
6583 json!({
6584 "one.rs": r#"// Rust file one"#,
6585 "one.ts": r#"// TypeScript file one"#,
6586 "two.rs": r#"// Rust file two"#,
6587 "two.ts": r#"// TypeScript file two"#,
6588 }),
6589 )
6590 .await;
6591 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6592
6593 assert_eq!(
6594 search(
6595 &project,
6596 SearchQuery::text(
6597 search_query,
6598 false,
6599 true,
6600 false,
6601 Default::default(),
6602 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6603 false,
6604 None,
6605 )
6606 .unwrap(),
6607 cx
6608 )
6609 .await
6610 .unwrap(),
6611 HashMap::from_iter([
6612 (path!("dir/one.rs").to_string(), vec![8..12]),
6613 (path!("dir/one.ts").to_string(), vec![14..18]),
6614 (path!("dir/two.rs").to_string(), vec![8..12]),
6615 (path!("dir/two.ts").to_string(), vec![14..18]),
6616 ]),
6617 "If no exclusions match, all files should be returned"
6618 );
6619
6620 assert_eq!(
6621 search(
6622 &project,
6623 SearchQuery::text(
6624 search_query,
6625 false,
6626 true,
6627 false,
6628 Default::default(),
6629 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6630 false,
6631 None,
6632 )
6633 .unwrap(),
6634 cx
6635 )
6636 .await
6637 .unwrap(),
6638 HashMap::from_iter([
6639 (path!("dir/one.ts").to_string(), vec![14..18]),
6640 (path!("dir/two.ts").to_string(), vec![14..18]),
6641 ]),
6642 "Rust exclusion search should give only TypeScript files"
6643 );
6644
6645 assert_eq!(
6646 search(
6647 &project,
6648 SearchQuery::text(
6649 search_query,
6650 false,
6651 true,
6652 false,
6653 Default::default(),
6654 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6655 .unwrap(),
6656 false,
6657 None,
6658 )
6659 .unwrap(),
6660 cx
6661 )
6662 .await
6663 .unwrap(),
6664 HashMap::from_iter([
6665 (path!("dir/one.rs").to_string(), vec![8..12]),
6666 (path!("dir/two.rs").to_string(), vec![8..12]),
6667 ]),
6668 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6669 );
6670
6671 assert!(
6672 search(
6673 &project,
6674 SearchQuery::text(
6675 search_query,
6676 false,
6677 true,
6678 false,
6679 Default::default(),
6680 PathMatcher::new(
6681 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6682 PathStyle::local(),
6683 )
6684 .unwrap(),
6685 false,
6686 None,
6687 )
6688 .unwrap(),
6689 cx
6690 )
6691 .await
6692 .unwrap()
6693 .is_empty(),
6694 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6695 );
6696}
6697
6698#[gpui::test]
6699async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6700 init_test(cx);
6701
6702 let search_query = "file";
6703
6704 let fs = FakeFs::new(cx.executor());
6705 fs.insert_tree(
6706 path!("/dir"),
6707 json!({
6708 "one.rs": r#"// Rust file one"#,
6709 "one.ts": r#"// TypeScript file one"#,
6710 "two.rs": r#"// Rust file two"#,
6711 "two.ts": r#"// TypeScript file two"#,
6712 }),
6713 )
6714 .await;
6715
6716 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6717 let path_style = PathStyle::local();
6718 let _buffer = project.update(cx, |project, cx| {
6719 project.create_local_buffer("file", None, false, cx)
6720 });
6721
6722 assert_eq!(
6723 search(
6724 &project,
6725 SearchQuery::text(
6726 search_query,
6727 false,
6728 true,
6729 false,
6730 Default::default(),
6731 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6732 false,
6733 None,
6734 )
6735 .unwrap(),
6736 cx
6737 )
6738 .await
6739 .unwrap(),
6740 HashMap::from_iter([
6741 (path!("dir/one.rs").to_string(), vec![8..12]),
6742 (path!("dir/one.ts").to_string(), vec![14..18]),
6743 (path!("dir/two.rs").to_string(), vec![8..12]),
6744 (path!("dir/two.ts").to_string(), vec![14..18]),
6745 ]),
6746 "If no exclusions match, all files should be returned"
6747 );
6748
6749 assert_eq!(
6750 search(
6751 &project,
6752 SearchQuery::text(
6753 search_query,
6754 false,
6755 true,
6756 false,
6757 Default::default(),
6758 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6759 false,
6760 None,
6761 )
6762 .unwrap(),
6763 cx
6764 )
6765 .await
6766 .unwrap(),
6767 HashMap::from_iter([
6768 (path!("dir/one.ts").to_string(), vec![14..18]),
6769 (path!("dir/two.ts").to_string(), vec![14..18]),
6770 ]),
6771 "Rust exclusion search should give only TypeScript files"
6772 );
6773
6774 assert_eq!(
6775 search(
6776 &project,
6777 SearchQuery::text(
6778 search_query,
6779 false,
6780 true,
6781 false,
6782 Default::default(),
6783 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6784 false,
6785 None,
6786 )
6787 .unwrap(),
6788 cx
6789 )
6790 .await
6791 .unwrap(),
6792 HashMap::from_iter([
6793 (path!("dir/one.rs").to_string(), vec![8..12]),
6794 (path!("dir/two.rs").to_string(), vec![8..12]),
6795 ]),
6796 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6797 );
6798
6799 assert!(
6800 search(
6801 &project,
6802 SearchQuery::text(
6803 search_query,
6804 false,
6805 true,
6806 false,
6807 Default::default(),
6808 PathMatcher::new(
6809 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6810 PathStyle::local(),
6811 )
6812 .unwrap(),
6813 false,
6814 None,
6815 )
6816 .unwrap(),
6817 cx
6818 )
6819 .await
6820 .unwrap()
6821 .is_empty(),
6822 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6823 );
6824}
6825
6826#[gpui::test]
6827async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6828 init_test(cx);
6829
6830 let search_query = "file";
6831
6832 let fs = FakeFs::new(cx.executor());
6833 fs.insert_tree(
6834 path!("/dir"),
6835 json!({
6836 "one.rs": r#"// Rust file one"#,
6837 "one.ts": r#"// TypeScript file one"#,
6838 "two.rs": r#"// Rust file two"#,
6839 "two.ts": r#"// TypeScript file two"#,
6840 }),
6841 )
6842 .await;
6843 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6844 assert!(
6845 search(
6846 &project,
6847 SearchQuery::text(
6848 search_query,
6849 false,
6850 true,
6851 false,
6852 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6853 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6854 false,
6855 None,
6856 )
6857 .unwrap(),
6858 cx
6859 )
6860 .await
6861 .unwrap()
6862 .is_empty(),
6863 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6864 );
6865
6866 assert!(
6867 search(
6868 &project,
6869 SearchQuery::text(
6870 search_query,
6871 false,
6872 true,
6873 false,
6874 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6875 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6876 false,
6877 None,
6878 )
6879 .unwrap(),
6880 cx
6881 )
6882 .await
6883 .unwrap()
6884 .is_empty(),
6885 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6886 );
6887
6888 assert!(
6889 search(
6890 &project,
6891 SearchQuery::text(
6892 search_query,
6893 false,
6894 true,
6895 false,
6896 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6897 .unwrap(),
6898 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6899 .unwrap(),
6900 false,
6901 None,
6902 )
6903 .unwrap(),
6904 cx
6905 )
6906 .await
6907 .unwrap()
6908 .is_empty(),
6909 "Non-matching inclusions and exclusions should not change that."
6910 );
6911
6912 assert_eq!(
6913 search(
6914 &project,
6915 SearchQuery::text(
6916 search_query,
6917 false,
6918 true,
6919 false,
6920 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6921 .unwrap(),
6922 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6923 .unwrap(),
6924 false,
6925 None,
6926 )
6927 .unwrap(),
6928 cx
6929 )
6930 .await
6931 .unwrap(),
6932 HashMap::from_iter([
6933 (path!("dir/one.ts").to_string(), vec![14..18]),
6934 (path!("dir/two.ts").to_string(), vec![14..18]),
6935 ]),
6936 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6937 );
6938}
6939
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identical contents: each holds the needle in a `.rs`
    // and a `.ts` file, so only the inclusion patterns distinguish the results.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // An inclusion pattern prefixed with a worktree name should restrict
    // results to that worktree only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same as above, targeting the second worktree instead.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A bare extension pattern (no worktree prefix) applies across all
    // worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7038
7039#[gpui::test]
7040async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7041 init_test(cx);
7042
7043 let fs = FakeFs::new(cx.background_executor.clone());
7044 fs.insert_tree(
7045 path!("/dir"),
7046 json!({
7047 ".git": {},
7048 ".gitignore": "**/target\n/node_modules\n",
7049 "target": {
7050 "index.txt": "index_key:index_value"
7051 },
7052 "node_modules": {
7053 "eslint": {
7054 "index.ts": "const eslint_key = 'eslint value'",
7055 "package.json": r#"{ "some_key": "some value" }"#,
7056 },
7057 "prettier": {
7058 "index.ts": "const prettier_key = 'prettier value'",
7059 "package.json": r#"{ "other_key": "other value" }"#,
7060 },
7061 },
7062 "package.json": r#"{ "main_key": "main value" }"#,
7063 }),
7064 )
7065 .await;
7066 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7067
7068 let query = "key";
7069 assert_eq!(
7070 search(
7071 &project,
7072 SearchQuery::text(
7073 query,
7074 false,
7075 false,
7076 false,
7077 Default::default(),
7078 Default::default(),
7079 false,
7080 None,
7081 )
7082 .unwrap(),
7083 cx
7084 )
7085 .await
7086 .unwrap(),
7087 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7088 "Only one non-ignored file should have the query"
7089 );
7090
7091 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7092 let path_style = PathStyle::local();
7093 assert_eq!(
7094 search(
7095 &project,
7096 SearchQuery::text(
7097 query,
7098 false,
7099 false,
7100 true,
7101 Default::default(),
7102 Default::default(),
7103 false,
7104 None,
7105 )
7106 .unwrap(),
7107 cx
7108 )
7109 .await
7110 .unwrap(),
7111 HashMap::from_iter([
7112 (path!("dir/package.json").to_string(), vec![8..11]),
7113 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7114 (
7115 path!("dir/node_modules/prettier/package.json").to_string(),
7116 vec![9..12]
7117 ),
7118 (
7119 path!("dir/node_modules/prettier/index.ts").to_string(),
7120 vec![15..18]
7121 ),
7122 (
7123 path!("dir/node_modules/eslint/index.ts").to_string(),
7124 vec![13..16]
7125 ),
7126 (
7127 path!("dir/node_modules/eslint/package.json").to_string(),
7128 vec![8..11]
7129 ),
7130 ]),
7131 "Unrestricted search with ignored directories should find every file with the query"
7132 );
7133
7134 let files_to_include =
7135 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7136 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7137 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7138 assert_eq!(
7139 search(
7140 &project,
7141 SearchQuery::text(
7142 query,
7143 false,
7144 false,
7145 true,
7146 files_to_include,
7147 files_to_exclude,
7148 false,
7149 None,
7150 )
7151 .unwrap(),
7152 cx
7153 )
7154 .await
7155 .unwrap(),
7156 HashMap::from_iter([(
7157 path!("dir/node_modules/prettier/package.json").to_string(),
7158 vec![9..12]
7159 )]),
7160 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7161 );
7162}
7163
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Cyrillic letters are 2 bytes each in UTF-8, so the expected match
    // ranges below are byte offsets, not character counts ("привет" = 12 bytes).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive unicode query stays a plain-text search...
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // ...and only the lowercase occurrences match.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive unicode query is built as a regex search instead
    // (presumably so case folding of non-ASCII goes through the regex engine),
    // and matches both the uppercase and lowercase occurrences.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Punctuation in the query text is treated literally, not as a regex
    // metacharacter: "привет." matches only the file that ends in a period
    // (otherwise "привет!" in one.rs would also match).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7246
7247#[gpui::test]
7248async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7249 init_test(cx);
7250
7251 let fs = FakeFs::new(cx.executor());
7252 fs.insert_tree(
7253 "/one/two",
7254 json!({
7255 "three": {
7256 "a.txt": "",
7257 "four": {}
7258 },
7259 "c.rs": ""
7260 }),
7261 )
7262 .await;
7263
7264 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7265 project
7266 .update(cx, |project, cx| {
7267 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7268 project.create_entry((id, rel_path("b..")), true, cx)
7269 })
7270 .await
7271 .unwrap()
7272 .into_included()
7273 .unwrap();
7274
7275 assert_eq!(
7276 fs.paths(true),
7277 vec![
7278 PathBuf::from(path!("/")),
7279 PathBuf::from(path!("/one")),
7280 PathBuf::from(path!("/one/two")),
7281 PathBuf::from(path!("/one/two/c.rs")),
7282 PathBuf::from(path!("/one/two/three")),
7283 PathBuf::from(path!("/one/two/three/a.txt")),
7284 PathBuf::from(path!("/one/two/three/b..")),
7285 PathBuf::from(path!("/one/two/three/four")),
7286 ]
7287 );
7288}
7289
7290#[gpui::test]
7291async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7292 init_test(cx);
7293
7294 let fs = FakeFs::new(cx.executor());
7295 fs.insert_tree(
7296 path!("/dir"),
7297 json!({
7298 "a.tsx": "a",
7299 }),
7300 )
7301 .await;
7302
7303 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7304
7305 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7306 language_registry.add(tsx_lang());
7307 let language_server_names = [
7308 "TypeScriptServer",
7309 "TailwindServer",
7310 "ESLintServer",
7311 "NoHoverCapabilitiesServer",
7312 ];
7313 let mut language_servers = [
7314 language_registry.register_fake_lsp(
7315 "tsx",
7316 FakeLspAdapter {
7317 name: language_server_names[0],
7318 capabilities: lsp::ServerCapabilities {
7319 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7320 ..lsp::ServerCapabilities::default()
7321 },
7322 ..FakeLspAdapter::default()
7323 },
7324 ),
7325 language_registry.register_fake_lsp(
7326 "tsx",
7327 FakeLspAdapter {
7328 name: language_server_names[1],
7329 capabilities: lsp::ServerCapabilities {
7330 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7331 ..lsp::ServerCapabilities::default()
7332 },
7333 ..FakeLspAdapter::default()
7334 },
7335 ),
7336 language_registry.register_fake_lsp(
7337 "tsx",
7338 FakeLspAdapter {
7339 name: language_server_names[2],
7340 capabilities: lsp::ServerCapabilities {
7341 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7342 ..lsp::ServerCapabilities::default()
7343 },
7344 ..FakeLspAdapter::default()
7345 },
7346 ),
7347 language_registry.register_fake_lsp(
7348 "tsx",
7349 FakeLspAdapter {
7350 name: language_server_names[3],
7351 capabilities: lsp::ServerCapabilities {
7352 hover_provider: None,
7353 ..lsp::ServerCapabilities::default()
7354 },
7355 ..FakeLspAdapter::default()
7356 },
7357 ),
7358 ];
7359
7360 let (buffer, _handle) = project
7361 .update(cx, |p, cx| {
7362 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7363 })
7364 .await
7365 .unwrap();
7366 cx.executor().run_until_parked();
7367
7368 let mut servers_with_hover_requests = HashMap::default();
7369 for i in 0..language_server_names.len() {
7370 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7371 panic!(
7372 "Failed to get language server #{i} with name {}",
7373 &language_server_names[i]
7374 )
7375 });
7376 let new_server_name = new_server.server.name();
7377 assert!(
7378 !servers_with_hover_requests.contains_key(&new_server_name),
7379 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7380 );
7381 match new_server_name.as_ref() {
7382 "TailwindServer" | "TypeScriptServer" => {
7383 servers_with_hover_requests.insert(
7384 new_server_name.clone(),
7385 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7386 move |_, _| {
7387 let name = new_server_name.clone();
7388 async move {
7389 Ok(Some(lsp::Hover {
7390 contents: lsp::HoverContents::Scalar(
7391 lsp::MarkedString::String(format!("{name} hover")),
7392 ),
7393 range: None,
7394 }))
7395 }
7396 },
7397 ),
7398 );
7399 }
7400 "ESLintServer" => {
7401 servers_with_hover_requests.insert(
7402 new_server_name,
7403 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7404 |_, _| async move { Ok(None) },
7405 ),
7406 );
7407 }
7408 "NoHoverCapabilitiesServer" => {
7409 let _never_handled = new_server
7410 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7411 panic!(
7412 "Should not call for hovers server with no corresponding capabilities"
7413 )
7414 });
7415 }
7416 unexpected => panic!("Unexpected server name: {unexpected}"),
7417 }
7418 }
7419
7420 let hover_task = project.update(cx, |project, cx| {
7421 project.hover(&buffer, Point::new(0, 0), cx)
7422 });
7423 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7424 |mut hover_request| async move {
7425 hover_request
7426 .next()
7427 .await
7428 .expect("All hover requests should have been triggered")
7429 },
7430 ))
7431 .await;
7432 assert_eq!(
7433 vec!["TailwindServer hover", "TypeScriptServer hover"],
7434 hover_task
7435 .await
7436 .into_iter()
7437 .flatten()
7438 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7439 .sorted()
7440 .collect::<Vec<_>>(),
7441 "Should receive hover responses from all related servers with hover capabilities"
7442 );
7443}
7444
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake server that advertises hover support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with hover content made up exclusively of empty or
    // whitespace-only strings.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Issue the hover, wait for the fake server to observe the request, then
    // verify that the whitespace-only parts were filtered out entirely.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7518
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake server that advertises code-action support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds: one
    // `source.organizeImports` and one `source.fixAll`.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions restricted to the organize-imports kind only.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action matching the requested kind should survive the filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7597
7598#[gpui::test]
7599async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7600 init_test(cx);
7601
7602 let fs = FakeFs::new(cx.executor());
7603 fs.insert_tree(
7604 path!("/dir"),
7605 json!({
7606 "a.tsx": "a",
7607 }),
7608 )
7609 .await;
7610
7611 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7612
7613 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7614 language_registry.add(tsx_lang());
7615 let language_server_names = [
7616 "TypeScriptServer",
7617 "TailwindServer",
7618 "ESLintServer",
7619 "NoActionsCapabilitiesServer",
7620 ];
7621
7622 let mut language_server_rxs = [
7623 language_registry.register_fake_lsp(
7624 "tsx",
7625 FakeLspAdapter {
7626 name: language_server_names[0],
7627 capabilities: lsp::ServerCapabilities {
7628 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7629 ..lsp::ServerCapabilities::default()
7630 },
7631 ..FakeLspAdapter::default()
7632 },
7633 ),
7634 language_registry.register_fake_lsp(
7635 "tsx",
7636 FakeLspAdapter {
7637 name: language_server_names[1],
7638 capabilities: lsp::ServerCapabilities {
7639 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7640 ..lsp::ServerCapabilities::default()
7641 },
7642 ..FakeLspAdapter::default()
7643 },
7644 ),
7645 language_registry.register_fake_lsp(
7646 "tsx",
7647 FakeLspAdapter {
7648 name: language_server_names[2],
7649 capabilities: lsp::ServerCapabilities {
7650 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7651 ..lsp::ServerCapabilities::default()
7652 },
7653 ..FakeLspAdapter::default()
7654 },
7655 ),
7656 language_registry.register_fake_lsp(
7657 "tsx",
7658 FakeLspAdapter {
7659 name: language_server_names[3],
7660 capabilities: lsp::ServerCapabilities {
7661 code_action_provider: None,
7662 ..lsp::ServerCapabilities::default()
7663 },
7664 ..FakeLspAdapter::default()
7665 },
7666 ),
7667 ];
7668
7669 let (buffer, _handle) = project
7670 .update(cx, |p, cx| {
7671 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7672 })
7673 .await
7674 .unwrap();
7675 cx.executor().run_until_parked();
7676
7677 let mut servers_with_actions_requests = HashMap::default();
7678 for i in 0..language_server_names.len() {
7679 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7680 panic!(
7681 "Failed to get language server #{i} with name {}",
7682 &language_server_names[i]
7683 )
7684 });
7685 let new_server_name = new_server.server.name();
7686
7687 assert!(
7688 !servers_with_actions_requests.contains_key(&new_server_name),
7689 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7690 );
7691 match new_server_name.0.as_ref() {
7692 "TailwindServer" | "TypeScriptServer" => {
7693 servers_with_actions_requests.insert(
7694 new_server_name.clone(),
7695 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7696 move |_, _| {
7697 let name = new_server_name.clone();
7698 async move {
7699 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7700 lsp::CodeAction {
7701 title: format!("{name} code action"),
7702 ..lsp::CodeAction::default()
7703 },
7704 )]))
7705 }
7706 },
7707 ),
7708 );
7709 }
7710 "ESLintServer" => {
7711 servers_with_actions_requests.insert(
7712 new_server_name,
7713 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7714 |_, _| async move { Ok(None) },
7715 ),
7716 );
7717 }
7718 "NoActionsCapabilitiesServer" => {
7719 let _never_handled = new_server
7720 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7721 panic!(
7722 "Should not call for code actions server with no corresponding capabilities"
7723 )
7724 });
7725 }
7726 unexpected => panic!("Unexpected server name: {unexpected}"),
7727 }
7728 }
7729
7730 let code_actions_task = project.update(cx, |project, cx| {
7731 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7732 });
7733
7734 // cx.run_until_parked();
7735 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7736 |mut code_actions_request| async move {
7737 code_actions_request
7738 .next()
7739 .await
7740 .expect("All code actions requests should have been triggered")
7741 },
7742 ))
7743 .await;
7744 assert_eq!(
7745 vec!["TailwindServer code action", "TypeScriptServer code action"],
7746 code_actions_task
7747 .await
7748 .unwrap()
7749 .unwrap()
7750 .into_iter()
7751 .map(|code_action| code_action.lsp_action.title().to_owned())
7752 .sorted()
7753 .collect::<Vec<_>>(),
7754 "Should receive code actions responses from all related servers with hover capabilities"
7755 );
7756}
7757
7758#[gpui::test]
7759async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7760 init_test(cx);
7761
7762 let fs = FakeFs::new(cx.executor());
7763 fs.insert_tree(
7764 "/dir",
7765 json!({
7766 "a.rs": "let a = 1;",
7767 "b.rs": "let b = 2;",
7768 "c.rs": "let c = 2;",
7769 }),
7770 )
7771 .await;
7772
7773 let project = Project::test(
7774 fs,
7775 [
7776 "/dir/a.rs".as_ref(),
7777 "/dir/b.rs".as_ref(),
7778 "/dir/c.rs".as_ref(),
7779 ],
7780 cx,
7781 )
7782 .await;
7783
7784 // check the initial state and get the worktrees
7785 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7786 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7787 assert_eq!(worktrees.len(), 3);
7788
7789 let worktree_a = worktrees[0].read(cx);
7790 let worktree_b = worktrees[1].read(cx);
7791 let worktree_c = worktrees[2].read(cx);
7792
7793 // check they start in the right order
7794 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7795 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7796 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7797
7798 (
7799 worktrees[0].clone(),
7800 worktrees[1].clone(),
7801 worktrees[2].clone(),
7802 )
7803 });
7804
7805 // move first worktree to after the second
7806 // [a, b, c] -> [b, a, c]
7807 project
7808 .update(cx, |project, cx| {
7809 let first = worktree_a.read(cx);
7810 let second = worktree_b.read(cx);
7811 project.move_worktree(first.id(), second.id(), cx)
7812 })
7813 .expect("moving first after second");
7814
7815 // check the state after moving
7816 project.update(cx, |project, cx| {
7817 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7818 assert_eq!(worktrees.len(), 3);
7819
7820 let first = worktrees[0].read(cx);
7821 let second = worktrees[1].read(cx);
7822 let third = worktrees[2].read(cx);
7823
7824 // check they are now in the right order
7825 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7826 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7827 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7828 });
7829
7830 // move the second worktree to before the first
7831 // [b, a, c] -> [a, b, c]
7832 project
7833 .update(cx, |project, cx| {
7834 let second = worktree_a.read(cx);
7835 let first = worktree_b.read(cx);
7836 project.move_worktree(first.id(), second.id(), cx)
7837 })
7838 .expect("moving second before first");
7839
7840 // check the state after moving
7841 project.update(cx, |project, cx| {
7842 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7843 assert_eq!(worktrees.len(), 3);
7844
7845 let first = worktrees[0].read(cx);
7846 let second = worktrees[1].read(cx);
7847 let third = worktrees[2].read(cx);
7848
7849 // check they are now in the right order
7850 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7851 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7852 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7853 });
7854
7855 // move the second worktree to after the third
7856 // [a, b, c] -> [a, c, b]
7857 project
7858 .update(cx, |project, cx| {
7859 let second = worktree_b.read(cx);
7860 let third = worktree_c.read(cx);
7861 project.move_worktree(second.id(), third.id(), cx)
7862 })
7863 .expect("moving second after third");
7864
7865 // check the state after moving
7866 project.update(cx, |project, cx| {
7867 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7868 assert_eq!(worktrees.len(), 3);
7869
7870 let first = worktrees[0].read(cx);
7871 let second = worktrees[1].read(cx);
7872 let third = worktrees[2].read(cx);
7873
7874 // check they are now in the right order
7875 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7876 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7877 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7878 });
7879
7880 // move the third worktree to before the second
7881 // [a, c, b] -> [a, b, c]
7882 project
7883 .update(cx, |project, cx| {
7884 let third = worktree_c.read(cx);
7885 let second = worktree_b.read(cx);
7886 project.move_worktree(third.id(), second.id(), cx)
7887 })
7888 .expect("moving third before second");
7889
7890 // check the state after moving
7891 project.update(cx, |project, cx| {
7892 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7893 assert_eq!(worktrees.len(), 3);
7894
7895 let first = worktrees[0].read(cx);
7896 let second = worktrees[1].read(cx);
7897 let third = worktrees[2].read(cx);
7898
7899 // check they are now in the right order
7900 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7901 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7902 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7903 });
7904
7905 // move the first worktree to after the third
7906 // [a, b, c] -> [b, c, a]
7907 project
7908 .update(cx, |project, cx| {
7909 let first = worktree_a.read(cx);
7910 let third = worktree_c.read(cx);
7911 project.move_worktree(first.id(), third.id(), cx)
7912 })
7913 .expect("moving first after third");
7914
7915 // check the state after moving
7916 project.update(cx, |project, cx| {
7917 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7918 assert_eq!(worktrees.len(), 3);
7919
7920 let first = worktrees[0].read(cx);
7921 let second = worktrees[1].read(cx);
7922 let third = worktrees[2].read(cx);
7923
7924 // check they are now in the right order
7925 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7926 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7927 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7928 });
7929
7930 // move the third worktree to before the first
7931 // [b, c, a] -> [a, b, c]
7932 project
7933 .update(cx, |project, cx| {
7934 let third = worktree_a.read(cx);
7935 let first = worktree_b.read(cx);
7936 project.move_worktree(third.id(), first.id(), cx)
7937 })
7938 .expect("moving third before first");
7939
7940 // check the state after moving
7941 project.update(cx, |project, cx| {
7942 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7943 assert_eq!(worktrees.len(), 3);
7944
7945 let first = worktrees[0].read(cx);
7946 let second = worktrees[1].read(cx);
7947 let third = worktrees[2].read(cx);
7948
7949 // check they are now in the right order
7950 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7951 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7952 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7953 });
7954}
7955
7956#[gpui::test]
7957async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7958 init_test(cx);
7959
7960 let staged_contents = r#"
7961 fn main() {
7962 println!("hello world");
7963 }
7964 "#
7965 .unindent();
7966 let file_contents = r#"
7967 // print goodbye
7968 fn main() {
7969 println!("goodbye world");
7970 }
7971 "#
7972 .unindent();
7973
7974 let fs = FakeFs::new(cx.background_executor.clone());
7975 fs.insert_tree(
7976 "/dir",
7977 json!({
7978 ".git": {},
7979 "src": {
7980 "main.rs": file_contents,
7981 }
7982 }),
7983 )
7984 .await;
7985
7986 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7987
7988 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7989
7990 let buffer = project
7991 .update(cx, |project, cx| {
7992 project.open_local_buffer("/dir/src/main.rs", cx)
7993 })
7994 .await
7995 .unwrap();
7996 let unstaged_diff = project
7997 .update(cx, |project, cx| {
7998 project.open_unstaged_diff(buffer.clone(), cx)
7999 })
8000 .await
8001 .unwrap();
8002
8003 cx.run_until_parked();
8004 unstaged_diff.update(cx, |unstaged_diff, cx| {
8005 let snapshot = buffer.read(cx).snapshot();
8006 assert_hunks(
8007 unstaged_diff.snapshot(cx).hunks(&snapshot),
8008 &snapshot,
8009 &unstaged_diff.base_text_string(cx).unwrap(),
8010 &[
8011 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8012 (
8013 2..3,
8014 " println!(\"hello world\");\n",
8015 " println!(\"goodbye world\");\n",
8016 DiffHunkStatus::modified_none(),
8017 ),
8018 ],
8019 );
8020 });
8021
8022 let staged_contents = r#"
8023 // print goodbye
8024 fn main() {
8025 }
8026 "#
8027 .unindent();
8028
8029 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8030
8031 cx.run_until_parked();
8032 unstaged_diff.update(cx, |unstaged_diff, cx| {
8033 let snapshot = buffer.read(cx).snapshot();
8034 assert_hunks(
8035 unstaged_diff
8036 .snapshot(cx)
8037 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8038 &snapshot,
8039 &unstaged_diff.base_text(cx).text(),
8040 &[(
8041 2..3,
8042 "",
8043 " println!(\"goodbye world\");\n",
8044 DiffHunkStatus::added_none(),
8045 )],
8046 );
8047 });
8048}
8049
// Tests the uncommitted diff (working copy vs. HEAD) for buffers:
// - hunk secondary statuses reflecting whether a change is also in the index,
// - reacting to a HEAD reset that differs from both buffer and index,
// - the diff for a file deleted from the working tree, and staging that
//   deletion by writing an index that omits the file.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both contain `deletion.rs`, which is absent from the
    // working tree above, so it will appear as an uncommitted deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should be assigned the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is not in the index (so it has a secondary,
    // i.e. unstaged, hunk); the println change matches the index (no
    // secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The entire file is one deletion hunk; it still exists in the index, so
    // the deletion is unstaged (has a secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that omits it.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk no longer has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8233
// Tests interactive staging of individual hunks in an uncommitted diff:
// optimistic "pending" secondary statuses, the event sequence emitted while
// the index write is in flight, rollback of the optimistic state when the
// index write fails, and two staging operations issued back-to-back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so their sequence can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is optimistically marked as pending even though
    // the index write is doomed to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8583
// Tests staging hunks while filesystem events are paused, so confirmations of
// index writes arrive late and interleave with further staging operations.
// The optimistic pending statuses must survive until the buffered events are
// flushed.
// NOTE(review): `seeds(340, 472)` pins specific random seeds — presumably
// ones that previously reproduced a failure; confirm before changing.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index are identical, so all three hunks start unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index-write confirmations are buffered instead of
    // being delivered immediately.
    fs.pause_events();

    // Stage the first hunk. It stays in the pending state because no FS event
    // has confirmed the index write yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8777
// Randomized test: repeatedly stage or unstage random hunks (with random
// yields in between), then verify that once all index writes settle, each
// hunk's secondary status matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line (rows 0, 5, 10, 15, 20, 25) is modified in the buffer
    // relative to HEAD/index, producing six modification hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of the expected state: each operation
    // below also records the expected secondary status on the local copy.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: staging leaves a removal pending, unstaging leaves
        // an addition pending, until the corresponding index write lands.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending statuses resolve to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The real diff state must match the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8897
8898#[gpui::test]
8899async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8900 init_test(cx);
8901
8902 let committed_contents = r#"
8903 fn main() {
8904 println!("hello from HEAD");
8905 }
8906 "#
8907 .unindent();
8908 let file_contents = r#"
8909 fn main() {
8910 println!("hello from the working copy");
8911 }
8912 "#
8913 .unindent();
8914
8915 let fs = FakeFs::new(cx.background_executor.clone());
8916 fs.insert_tree(
8917 "/dir",
8918 json!({
8919 ".git": {},
8920 "src": {
8921 "main.rs": file_contents,
8922 }
8923 }),
8924 )
8925 .await;
8926
8927 fs.set_head_for_repo(
8928 Path::new("/dir/.git"),
8929 &[("src/main.rs", committed_contents.clone())],
8930 "deadbeef",
8931 );
8932 fs.set_index_for_repo(
8933 Path::new("/dir/.git"),
8934 &[("src/main.rs", committed_contents.clone())],
8935 );
8936
8937 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8938
8939 let buffer = project
8940 .update(cx, |project, cx| {
8941 project.open_local_buffer("/dir/src/main.rs", cx)
8942 })
8943 .await
8944 .unwrap();
8945 let uncommitted_diff = project
8946 .update(cx, |project, cx| {
8947 project.open_uncommitted_diff(buffer.clone(), cx)
8948 })
8949 .await
8950 .unwrap();
8951
8952 cx.run_until_parked();
8953 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8954 let snapshot = buffer.read(cx).snapshot();
8955 assert_hunks(
8956 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8957 &snapshot,
8958 &uncommitted_diff.base_text_string(cx).unwrap(),
8959 &[(
8960 1..2,
8961 " println!(\"hello from HEAD\");\n",
8962 " println!(\"hello from the working copy\");\n",
8963 DiffHunkStatus {
8964 kind: DiffHunkStatusKind::Modified,
8965 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8966 },
8967 )],
8968 );
8969 });
8970}
8971
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must preserve the executable bit (100755)
// of the file's index entry rather than resetting it to mode 100644. Uses
// RealFs and the system `git` binary, so parking is allowed.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    // Commit `foo` with the executable bit set (mode 755).
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    // Modify the working copy so there is a hunk to stage.
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk in the diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // `git diff --staged` prints a "new mode" line when staging changed the
    // file's mode; there should be none.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via `git ls-files -s`, which prints each index entry's mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9056
/// Verifies that project paths resolve to the *innermost* containing git
/// repository: a file outside any repo maps to `None`, a file in the outer
/// repo maps to the outer repo, and a file under a vendored dependency with
/// its own `.git` maps to that nested repo. Also checks that removing the
/// outer `.git` directory invalidates its mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial git scan so both repositories are discovered.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair maps a worktree-relative path to the expected
        // (repository work directory, repo-relative path), or `None` when the
        // file is not inside any repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` directory should drop its
    // path mappings once the rescan settles.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9146
/// Verifies special handling of the home directory as a git work directory:
/// when only a subfolder of home is opened as a worktree, the home-level repo
/// is NOT associated with its files; but when home itself is opened as the
/// worktree, the repository is recognized normally.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only the `project` subfolder: the repo rooted at `$HOME` should
    // not claim files in this worktree.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open `$HOME` itself as the worktree: now the repository applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9204
/// End-to-end status tracking against a real git repository (`RealFs`):
/// checks the initial modified/added/deleted statuses, that editing a clean
/// file surfaces a new modified entry, and that after committing everything,
/// deleting a tracked file yields a deleted status while deleting an
/// untracked file leaves no status at all.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git subprocess/filesystem work requires parking.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify the previously-unchanged file; it should gain a modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit everything outstanding so the working tree is clean except for
    // the untracked b.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9360
/// Checks postprocessing of raw git statuses: a nested repository directory
/// (`sub`) is excluded from the outer repo's statuses, and a file deleted in
/// the index but present in HEAD and the working copy is reported with a
/// combined `DA` (index-deleted, worktree-added) status.
///
/// NOTE(review): currently `#[ignore]`d — presumably flaky; confirm before
/// re-enabling.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (there is also one for `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9425
9426#[track_caller]
9427/// We merge lhs into rhs.
9428fn merge_pending_ops_snapshots(
9429 source: Vec<pending_op::PendingOps>,
9430 mut target: Vec<pending_op::PendingOps>,
9431) -> Vec<pending_op::PendingOps> {
9432 for s_ops in source {
9433 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9434 if ops.repo_path == s_ops.repo_path {
9435 Some(idx)
9436 } else {
9437 None
9438 }
9439 }) {
9440 let t_ops = &mut target[idx];
9441 for s_op in s_ops.ops {
9442 if let Some(op_idx) = t_ops
9443 .ops
9444 .iter()
9445 .zip(0..)
9446 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9447 {
9448 let t_op = &mut t_ops.ops[op_idx];
9449 match (s_op.job_status, t_op.job_status) {
9450 (pending_op::JobStatus::Running, _) => {}
9451 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9452 (s_st, t_st) if s_st == t_st => {}
9453 _ => unreachable!(),
9454 }
9455 } else {
9456 t_ops.ops.push(s_op);
9457 }
9458 }
9459 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9460 } else {
9461 target.push(s_ops);
9462 }
9463 }
9464 target
9465}
9466
/// Exercises the pending-ops bookkeeping for alternating stage/unstage calls
/// on a single untracked file: each request should immediately appear as a
/// `Running` op with a monotonically increasing id, transition to `Finished`
/// once awaited, and the full sequence of five ops should be observable via
/// `PendingOpsChanged` events. Ends by checking the final cached git status.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot emitted by the git store, merged
    // so that later (settled) statuses win over `Running` ones.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next op; incremented after each request.
    let mut id = 1u16;

    // Issues a stage or unstage request for `path` and asserts the op is
    // `Running` immediately and `Finished` after the task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // All five ops should have been observed via events, in order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last op was a stage, so the file ends up added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9631
/// Verifies coalescing of redundant staging requests: when a second stage of
/// the same path is issued while the first is still in flight, the first op
/// ends up `Skipped` and only the second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates merged pending-ops snapshots from git store events.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detach it so it stays in flight while we issue a
    // second, identical request.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second request for the same path; awaiting it should supersede the
    // first one within the timeout.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was made redundant by op 2 and is recorded as Skipped.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9741
/// Exercises `stage_all`/`unstage_all` pending-op tracking across multiple
/// untracked files: staging one file, then staging all, then unstaging all
/// should leave each path with a Finished staged op followed by a Finished
/// unstaged op, and both files untracked in the final cached status.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates merged pending-ops snapshots from git store events.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a single file, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: stage_all found it already staged, so only the explicit stage
    // (id 1) and the unstage_all (id 2) are recorded.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the final unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
9872
/// Opens a deep subfolder of a repository as the project worktree and checks
/// that the repository rooted above the worktree is still discovered, that
/// statuses for files inside the worktree are reported correctly, and that
/// clearing the repo's status is reflected after a rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the innermost subfolder as the worktree.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's status should propagate after the next scan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9952
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` (which is never true).
#[cfg(any())]
#[gpui::test]
/// Simulates a conflicted cherry-pick with a real git repo and verifies that
/// the repository's `merge_conflicts` set tracks the conflicted path while
/// CHERRY_PICK_HEAD exists, and empties once the cherry-pick is concluded.
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it onto
    // main after a conflicting edit, producing a conflict on a.txt.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10035
/// Verifies that rewriting `.gitignore` is picked up: a file that was ignored
/// (`b.txt` via `*.txt`) becomes non-ignored after the rule changes to
/// `*.xml`, and staging it then reports an Added status, while the newly
/// ignored `a.xml` loses its tracked state display.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // a.xml is now ignored; b.txt is staged and shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10103
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates
/// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename and the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10185
10186// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
10187// you can't rename a directory which some program has already open. This is a
10188// limitation of the Windows. See:
10189// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10190// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10191#[gpui::test]
10192#[cfg_attr(target_os = "windows", ignore)]
10193async fn test_file_status(cx: &mut gpui::TestAppContext) {
10194 init_test(cx);
10195 cx.executor().allow_parking();
10196 const IGNORE_RULE: &str = "**/target";
10197
10198 let root = TempTree::new(json!({
10199 "project": {
10200 "a.txt": "a",
10201 "b.txt": "bb",
10202 "c": {
10203 "d": {
10204 "e.txt": "eee"
10205 }
10206 },
10207 "f.txt": "ffff",
10208 "target": {
10209 "build_file": "???"
10210 },
10211 ".gitignore": IGNORE_RULE
10212 },
10213
10214 }));
10215 let root_path = root.path();
10216
10217 const A_TXT: &str = "a.txt";
10218 const B_TXT: &str = "b.txt";
10219 const E_TXT: &str = "c/d/e.txt";
10220 const F_TXT: &str = "f.txt";
10221 const DOTGITIGNORE: &str = ".gitignore";
10222 const BUILD_FILE: &str = "target/build_file";
10223
10224 // Set up git repository before creating the worktree.
10225 let work_dir = root.path().join("project");
10226 let mut repo = git_init(work_dir.as_path());
10227 repo.add_ignore_rule(IGNORE_RULE).unwrap();
10228 git_add(A_TXT, &repo);
10229 git_add(E_TXT, &repo);
10230 git_add(DOTGITIGNORE, &repo);
10231 git_commit("Initial commit", &repo);
10232
10233 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10234
10235 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10236 tree.flush_fs_events(cx).await;
10237 project
10238 .update(cx, |project, cx| project.git_scans_complete(cx))
10239 .await;
10240 cx.executor().run_until_parked();
10241
10242 let repository = project.read_with(cx, |project, cx| {
10243 project.repositories(cx).values().next().unwrap().clone()
10244 });
10245
10246 // Check that the right git state is observed on startup
10247 repository.read_with(cx, |repository, _cx| {
10248 assert_eq!(
10249 repository.work_directory_abs_path.as_ref(),
10250 root_path.join("project").as_path()
10251 );
10252
10253 assert_eq!(
10254 repository
10255 .status_for_path(&repo_path(B_TXT))
10256 .unwrap()
10257 .status,
10258 FileStatus::Untracked,
10259 );
10260 assert_eq!(
10261 repository
10262 .status_for_path(&repo_path(F_TXT))
10263 .unwrap()
10264 .status,
10265 FileStatus::Untracked,
10266 );
10267 });
10268
10269 // Modify a file in the working copy.
10270 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
10271 tree.flush_fs_events(cx).await;
10272 project
10273 .update(cx, |project, cx| project.git_scans_complete(cx))
10274 .await;
10275 cx.executor().run_until_parked();
10276
10277 // The worktree detects that the file's git status has changed.
10278 repository.read_with(cx, |repository, _| {
10279 assert_eq!(
10280 repository
10281 .status_for_path(&repo_path(A_TXT))
10282 .unwrap()
10283 .status,
10284 StatusCode::Modified.worktree(),
10285 );
10286 });
10287
10288 // Create a commit in the git repository.
10289 git_add(A_TXT, &repo);
10290 git_add(B_TXT, &repo);
10291 git_commit("Committing modified and added", &repo);
10292 tree.flush_fs_events(cx).await;
10293 project
10294 .update(cx, |project, cx| project.git_scans_complete(cx))
10295 .await;
10296 cx.executor().run_until_parked();
10297
10298 // The worktree detects that the files' git status have changed.
10299 repository.read_with(cx, |repository, _cx| {
10300 assert_eq!(
10301 repository
10302 .status_for_path(&repo_path(F_TXT))
10303 .unwrap()
10304 .status,
10305 FileStatus::Untracked,
10306 );
10307 assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
10308 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10309 });
10310
10311 // Modify files in the working copy and perform git operations on other files.
10312 git_reset(0, &repo);
10313 git_remove_index(Path::new(B_TXT), &repo);
10314 git_stash(&mut repo);
10315 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
10316 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
10317 tree.flush_fs_events(cx).await;
10318 project
10319 .update(cx, |project, cx| project.git_scans_complete(cx))
10320 .await;
10321 cx.executor().run_until_parked();
10322
10323 // Check that more complex repo changes are tracked
10324 repository.read_with(cx, |repository, _cx| {
10325 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10326 assert_eq!(
10327 repository
10328 .status_for_path(&repo_path(B_TXT))
10329 .unwrap()
10330 .status,
10331 FileStatus::Untracked,
10332 );
10333 assert_eq!(
10334 repository
10335 .status_for_path(&repo_path(E_TXT))
10336 .unwrap()
10337 .status,
10338 StatusCode::Modified.worktree(),
10339 );
10340 });
10341
10342 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
10343 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
10344 std::fs::write(
10345 work_dir.join(DOTGITIGNORE),
10346 [IGNORE_RULE, "f.txt"].join("\n"),
10347 )
10348 .unwrap();
10349
10350 git_add(Path::new(DOTGITIGNORE), &repo);
10351 git_commit("Committing modified git ignore", &repo);
10352
10353 tree.flush_fs_events(cx).await;
10354 cx.executor().run_until_parked();
10355
10356 let mut renamed_dir_name = "first_directory/second_directory";
10357 const RENAMED_FILE: &str = "rf.txt";
10358
10359 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
10360 std::fs::write(
10361 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
10362 "new-contents",
10363 )
10364 .unwrap();
10365
10366 tree.flush_fs_events(cx).await;
10367 project
10368 .update(cx, |project, cx| project.git_scans_complete(cx))
10369 .await;
10370 cx.executor().run_until_parked();
10371
10372 repository.read_with(cx, |repository, _cx| {
10373 assert_eq!(
10374 repository
10375 .status_for_path(&RepoPath::from_rel_path(
10376 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10377 ))
10378 .unwrap()
10379 .status,
10380 FileStatus::Untracked,
10381 );
10382 });
10383
10384 renamed_dir_name = "new_first_directory/second_directory";
10385
10386 std::fs::rename(
10387 work_dir.join("first_directory"),
10388 work_dir.join("new_first_directory"),
10389 )
10390 .unwrap();
10391
10392 tree.flush_fs_events(cx).await;
10393 project
10394 .update(cx, |project, cx| project.git_scans_complete(cx))
10395 .await;
10396 cx.executor().run_until_parked();
10397
10398 repository.read_with(cx, |repository, _cx| {
10399 assert_eq!(
10400 repository
10401 .status_for_path(&RepoPath::from_rel_path(
10402 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10403 ))
10404 .unwrap()
10405 .status,
10406 FileStatus::Untracked,
10407 );
10408 });
10409}
10410
// Verifies that filesystem churn inside a gitignored directory (`target/`,
// emulating a flycheck/cargo run creating and deleting temp files under
// `target/debug/deps`) does not trigger spurious repository status rescans,
// and that only the already-loaded ignored level (`target/debug`) surfaces
// Added/Removed worktree-entry events.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every RepositoryUpdated event and every WorktreeUpdatedEntries
    // change so we can assert on exactly which events fire below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel file is test plumbing, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file under the ignored `target/` dir so those entries
    // get materialized in the worktree despite being gitignored.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a flycheck-style burst: create a nested ignored dir, drop a temp
    // file into it, then remove the whole dir again, flushing events each step.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
    No updates for more nested directories should happen as those are ignored",
    );
}
10569
10570// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10571// to different timings/ordering of events.
10572#[ignore]
10573#[gpui::test]
10574async fn test_odd_events_for_ignored_dirs(
10575 executor: BackgroundExecutor,
10576 cx: &mut gpui::TestAppContext,
10577) {
10578 init_test(cx);
10579 let fs = FakeFs::new(executor);
10580 fs.insert_tree(
10581 path!("/root"),
10582 json!({
10583 ".git": {},
10584 ".gitignore": "**/target/",
10585 "src": {
10586 "main.rs": "fn main() {}",
10587 },
10588 "target": {
10589 "debug": {
10590 "foo.txt": "foo",
10591 "deps": {}
10592 }
10593 }
10594 }),
10595 )
10596 .await;
10597 fs.set_head_and_index_for_repo(
10598 path!("/root/.git").as_ref(),
10599 &[
10600 (".gitignore", "**/target/".into()),
10601 ("src/main.rs", "fn main() {}".into()),
10602 ],
10603 );
10604
10605 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10606 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10607 let project_events = Arc::new(Mutex::new(Vec::new()));
10608 project.update(cx, |project, cx| {
10609 let repository_updates = repository_updates.clone();
10610 cx.subscribe(project.git_store(), move |_, _, e, _| {
10611 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10612 repository_updates.lock().push(e.clone());
10613 }
10614 })
10615 .detach();
10616 let project_events = project_events.clone();
10617 cx.subscribe_self(move |_, e, _| {
10618 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10619 project_events.lock().extend(
10620 updates
10621 .iter()
10622 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10623 .filter(|(path, _)| path != "fs-event-sentinel"),
10624 );
10625 }
10626 })
10627 .detach();
10628 });
10629
10630 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10631 tree.update(cx, |tree, cx| {
10632 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10633 })
10634 .await
10635 .unwrap();
10636 tree.flush_fs_events(cx).await;
10637 project
10638 .update(cx, |project, cx| project.git_scans_complete(cx))
10639 .await;
10640 cx.run_until_parked();
10641 tree.update(cx, |tree, _| {
10642 assert_eq!(
10643 tree.entries(true, 0)
10644 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10645 .collect::<Vec<_>>(),
10646 vec![
10647 (rel_path(""), false),
10648 (rel_path(".gitignore"), false),
10649 (rel_path("src"), false),
10650 (rel_path("src/main.rs"), false),
10651 (rel_path("target"), true),
10652 (rel_path("target/debug"), true),
10653 (rel_path("target/debug/deps"), true),
10654 (rel_path("target/debug/foo.txt"), true),
10655 ]
10656 );
10657 });
10658
10659 assert_eq!(
10660 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10661 vec![
10662 RepositoryEvent::BranchChanged,
10663 RepositoryEvent::StatusesChanged,
10664 RepositoryEvent::StatusesChanged,
10665 ],
10666 "Initial worktree scan should produce a repo update event"
10667 );
10668 assert_eq!(
10669 project_events.lock().drain(..).collect::<Vec<_>>(),
10670 vec![
10671 ("target".to_string(), PathChange::Loaded),
10672 ("target/debug".to_string(), PathChange::Loaded),
10673 ("target/debug/deps".to_string(), PathChange::Loaded),
10674 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10675 ],
10676 "All non-ignored entries and all opened firs should be getting a project event",
10677 );
10678
10679 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10680 // This may happen multiple times during a single flycheck, but once is enough for testing.
10681 fs.emit_fs_event("/root/target/debug/deps", None);
10682 tree.flush_fs_events(cx).await;
10683 project
10684 .update(cx, |project, cx| project.git_scans_complete(cx))
10685 .await;
10686 cx.executor().run_until_parked();
10687
10688 assert_eq!(
10689 repository_updates
10690 .lock()
10691 .iter()
10692 .cloned()
10693 .collect::<Vec<_>>(),
10694 Vec::new(),
10695 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10696 );
10697 assert_eq!(
10698 project_events.lock().as_slice(),
10699 Vec::new(),
10700 "No further project events should happen, as only ignored dirs received FS events",
10701 );
10702}
10703
// Verifies that repositories are only reported for visible worktrees: adding
// an invisible (single-file) worktree located inside an *outer* git repo must
// not cause that outer repo to appear in `Project::repositories`.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    // Two nested repos: /root/dir1 (outer) and /root/dir1/dep1 (inner).
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repo as the (visible) project root.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the inner repo should have been discovered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a single file that lives in the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (/root/dir1) must still not be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10765
// Verifies git-status and ignored-state bookkeeping across rescans: files
// ignored by an *ancestor* .gitignore (outside the worktree root), files
// ignored by the repo's own .gitignore, and newly staged files must each be
// reported with the expected status and `is_ignored` flag after changes land.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so the scan sees everything, including .git.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // Note: the ancestor .gitignore lives in /root, *above* the /root/tree
    // worktree root that the project opens below.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so we can assert on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: committed file is clean; ancestor-ignored file is not
    // marked ignored (the ancestor .gitignore is outside the worktree);
    // the repo-ignored file is marked ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    // Create fresh files matching the ancestor and repo ignore rules.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: staged file shows Added; ignore states match the
    // initial expectations for the new files; .git itself is always ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10906
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/<name>`) and submodules (`.git` file pointing at
// `.git/modules/<path>`) are each discovered as separate repositories, and
// that git-state changes in them are picked up and reflected in file status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repos — main, linked worktree, submodule — should be found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both say "b", but the file on disk says "B",
            // so src/b.txt becomes modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked-worktree repo, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier guarantees pending repo jobs have drained before asserting.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11062
// Verifies that two worktrees rooted in sibling subdirectories of a single
// git repository are deduplicated into ONE repository entry, rather than the
// repo being reported once per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // One repo at /root/project containing both worktree roots.
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children as separate worktrees of the same project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent, should remain.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11109
// Verifies that when a buffer is saved under a new path (save-as), its open
// unstaged/uncommitted diffs switch their base text to the *new* path's
// staged/committed content, driven by the `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents per file and per layer (HEAD vs index) so the
    // assertions below can tell exactly which base text the diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // An uncommitted diff opened after the rename should compare against
    // file_2's HEAD content, not file_1's.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11223
11224async fn search(
11225 project: &Entity<Project>,
11226 query: SearchQuery,
11227 cx: &mut gpui::TestAppContext,
11228) -> Result<HashMap<String, Vec<Range<usize>>>> {
11229 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11230 let mut results = HashMap::default();
11231 while let Ok(search_result) = search_rx.rx.recv().await {
11232 match search_result {
11233 SearchResult::Buffer { buffer, ranges } => {
11234 results.entry(buffer).or_insert(ranges);
11235 }
11236 SearchResult::LimitReached => {}
11237 }
11238 }
11239 Ok(results
11240 .into_iter()
11241 .map(|(buffer, ranges)| {
11242 buffer.update(cx, |buffer, cx| {
11243 let path = buffer
11244 .file()
11245 .unwrap()
11246 .full_path(cx)
11247 .to_string_lossy()
11248 .to_string();
11249 let ranges = ranges
11250 .into_iter()
11251 .map(|range| range.to_offset(buffer))
11252 .collect::<Vec<_>>();
11253 (path, ranges)
11254 })
11255 })
11256 .collect())
11257}
11258
// Verifies that reloading a buffer with a different encoding is undoable:
// undo restores both the old text and the old encoding, redo re-applies the
// new encoding, and the buffer never becomes dirty (all states come from disk).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: detected as UTF-8, text "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is the single code unit U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores the original encoding AND text without dirtying.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation, still clean.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11322
11323pub fn init_test(cx: &mut gpui::TestAppContext) {
11324 zlog::init_test();
11325
11326 cx.update(|cx| {
11327 let settings_store = SettingsStore::test(cx);
11328 cx.set_global(settings_store);
11329 release_channel::init(semver::Version::new(0, 0, 0), cx);
11330 });
11331}
11332
11333fn json_lang() -> Arc<Language> {
11334 Arc::new(Language::new(
11335 LanguageConfig {
11336 name: "JSON".into(),
11337 matcher: LanguageMatcher {
11338 path_suffixes: vec!["json".to_string()],
11339 ..Default::default()
11340 },
11341 ..Default::default()
11342 },
11343 None,
11344 ))
11345}
11346
11347fn js_lang() -> Arc<Language> {
11348 Arc::new(Language::new(
11349 LanguageConfig {
11350 name: "JavaScript".into(),
11351 matcher: LanguageMatcher {
11352 path_suffixes: vec!["js".to_string()],
11353 ..Default::default()
11354 },
11355 ..Default::default()
11356 },
11357 None,
11358 ))
11359}
11360
/// A fake "Python" language for toolchain tests: no parser, but a manifest
/// name (`pyproject.toml`) and a toolchain lister backed by the fake fs.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain for every ancestor of `subroot_relative_path`
            // that contains a `.venv` directory on the fake fs.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            // Resolution is unused by these tests; always fail.
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            // The fake toolchain needs no shell activation commands.
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11435
11436fn typescript_lang() -> Arc<Language> {
11437 Arc::new(Language::new(
11438 LanguageConfig {
11439 name: "TypeScript".into(),
11440 matcher: LanguageMatcher {
11441 path_suffixes: vec!["ts".to_string()],
11442 ..Default::default()
11443 },
11444 ..Default::default()
11445 },
11446 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11447 ))
11448}
11449
11450fn tsx_lang() -> Arc<Language> {
11451 Arc::new(Language::new(
11452 LanguageConfig {
11453 name: "tsx".into(),
11454 matcher: LanguageMatcher {
11455 path_suffixes: vec!["tsx".to_string()],
11456 ..Default::default()
11457 },
11458 ..Default::default()
11459 },
11460 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11461 ))
11462}
11463
11464fn get_all_tasks(
11465 project: &Entity<Project>,
11466 task_contexts: Arc<TaskContexts>,
11467 cx: &mut App,
11468) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11469 let new_tasks = project.update(cx, |project, cx| {
11470 project.task_store().update(cx, |task_store, cx| {
11471 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11472 this.used_and_current_resolved_tasks(task_contexts, cx)
11473 })
11474 })
11475 });
11476
11477 cx.background_spawn(async move {
11478 let (mut old, new) = new_tasks.await;
11479 old.extend(new);
11480 old
11481 })
11482}
11483
11484#[track_caller]
11485fn assert_entry_git_state(
11486 tree: &Worktree,
11487 repository: &Repository,
11488 path: &str,
11489 index_status: Option<StatusCode>,
11490 is_ignored: bool,
11491) {
11492 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11493 let entry = tree
11494 .entry_for_path(&rel_path(path))
11495 .unwrap_or_else(|| panic!("entry {path} not found"));
11496 let status = repository
11497 .status_for_path(&repo_path(path))
11498 .map(|entry| entry.status);
11499 let expected = index_status.map(|index_status| {
11500 TrackedStatus {
11501 index_status,
11502 worktree_status: StatusCode::Unmodified,
11503 }
11504 .into()
11505 });
11506 assert_eq!(
11507 status, expected,
11508 "expected {path} to have git status: {expected:?}"
11509 );
11510 assert_eq!(
11511 entry.is_ignored, is_ignored,
11512 "expected {path} to have is_ignored: {is_ignored}"
11513 );
11514}
11515
11516#[track_caller]
11517fn git_init(path: &Path) -> git2::Repository {
11518 let mut init_opts = RepositoryInitOptions::new();
11519 init_opts.initial_head("main");
11520 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11521}
11522
11523#[track_caller]
11524fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11525 let path = path.as_ref();
11526 let mut index = repo.index().expect("Failed to get index");
11527 index.add_path(path).expect("Failed to add file");
11528 index.write().expect("Failed to write index");
11529}
11530
11531#[track_caller]
11532fn git_remove_index(path: &Path, repo: &git2::Repository) {
11533 let mut index = repo.index().expect("Failed to get index");
11534 index.remove_path(path).expect("Failed to add file");
11535 index.write().expect("Failed to write index");
11536}
11537
11538#[track_caller]
11539fn git_commit(msg: &'static str, repo: &git2::Repository) {
11540 use git2::Signature;
11541
11542 let signature = Signature::now("test", "test@zed.dev").unwrap();
11543 let oid = repo.index().unwrap().write_tree().unwrap();
11544 let tree = repo.find_tree(oid).unwrap();
11545 if let Ok(head) = repo.head() {
11546 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11547
11548 let parent_commit = parent_obj.as_commit().unwrap();
11549
11550 repo.commit(
11551 Some("HEAD"),
11552 &signature,
11553 &signature,
11554 msg,
11555 &tree,
11556 &[parent_commit],
11557 )
11558 .expect("Failed to commit with parent");
11559 } else {
11560 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11561 .expect("Failed to commit");
11562 }
11563}
11564
#[cfg(any())]
#[track_caller]
// Applies the given commit's changes onto the current HEAD.
// Currently compiled out via `#[cfg(any())]` (always false); kept for future use.
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11570
11571#[track_caller]
11572fn git_stash(repo: &mut git2::Repository) {
11573 use git2::Signature;
11574
11575 let signature = Signature::now("test", "test@zed.dev").unwrap();
11576 repo.stash_save(&signature, "N/A", None)
11577 .expect("Failed to stash");
11578}
11579
11580#[track_caller]
11581fn git_reset(offset: usize, repo: &git2::Repository) {
11582 let head = repo.head().expect("Couldn't get repo head");
11583 let object = head.peel(git2::ObjectType::Commit).unwrap();
11584 let commit = object.as_commit().unwrap();
11585 let new_head = commit
11586 .parents()
11587 .inspect(|parnet| {
11588 parnet.message();
11589 })
11590 .nth(offset)
11591 .expect("Not enough history");
11592 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11593 .expect("Could not reset");
11594}
11595
#[cfg(any())]
#[track_caller]
/// Creates a branch named `name` pointing at the current HEAD commit.
/// Currently compiled out via `#[cfg(any())]` (always false); kept for future use.
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fix: the failure message previously said "Failed to commit", which is
    // misleading for a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11606
#[cfg(any())]
#[track_caller]
// Points HEAD at the given ref and updates the working tree to match.
// Currently compiled out via `#[cfg(any())]` (always false); kept for future use.
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11613
#[cfg(any())]
#[track_caller]
// Snapshots the repository's status as a map of path -> git2 status flags.
// Currently compiled out via `#[cfg(any())]` (always false); kept for future use.
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
11623
11624#[gpui::test]
11625async fn test_find_project_path_abs(
11626 background_executor: BackgroundExecutor,
11627 cx: &mut gpui::TestAppContext,
11628) {
11629 // find_project_path should work with absolute paths
11630 init_test(cx);
11631
11632 let fs = FakeFs::new(background_executor);
11633 fs.insert_tree(
11634 path!("/root"),
11635 json!({
11636 "project1": {
11637 "file1.txt": "content1",
11638 "subdir": {
11639 "file2.txt": "content2"
11640 }
11641 },
11642 "project2": {
11643 "file3.txt": "content3"
11644 }
11645 }),
11646 )
11647 .await;
11648
11649 let project = Project::test(
11650 fs.clone(),
11651 [
11652 path!("/root/project1").as_ref(),
11653 path!("/root/project2").as_ref(),
11654 ],
11655 cx,
11656 )
11657 .await;
11658
11659 // Make sure the worktrees are fully initialized
11660 project
11661 .update(cx, |project, cx| project.git_scans_complete(cx))
11662 .await;
11663 cx.run_until_parked();
11664
11665 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
11666 project.read_with(cx, |project, cx| {
11667 let worktrees: Vec<_> = project.worktrees(cx).collect();
11668 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
11669 let id1 = worktrees[0].read(cx).id();
11670 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
11671 let id2 = worktrees[1].read(cx).id();
11672 (abs_path1, id1, abs_path2, id2)
11673 });
11674
11675 project.update(cx, |project, cx| {
11676 let abs_path = project1_abs_path.join("file1.txt");
11677 let found_path = project.find_project_path(abs_path, cx).unwrap();
11678 assert_eq!(found_path.worktree_id, project1_id);
11679 assert_eq!(&*found_path.path, rel_path("file1.txt"));
11680
11681 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
11682 let found_path = project.find_project_path(abs_path, cx).unwrap();
11683 assert_eq!(found_path.worktree_id, project1_id);
11684 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
11685
11686 let abs_path = project2_abs_path.join("file3.txt");
11687 let found_path = project.find_project_path(abs_path, cx).unwrap();
11688 assert_eq!(found_path.worktree_id, project2_id);
11689 assert_eq!(&*found_path.path, rel_path("file3.txt"));
11690
11691 let abs_path = project1_abs_path.join("nonexistent.txt");
11692 let found_path = project.find_project_path(abs_path, cx);
11693 assert!(
11694 found_path.is_some(),
11695 "Should find project path for nonexistent file in worktree"
11696 );
11697
11698 // Test with an absolute path outside any worktree
11699 let abs_path = Path::new("/some/other/path");
11700 let found_path = project.find_project_path(abs_path, cx);
11701 assert!(
11702 found_path.is_none(),
11703 "Should not find project path for path outside any worktree"
11704 );
11705 });
11706}
11707
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Removing a worktree should drop repositories that no remaining worktree
    // contributes, and the active repository should fall back to a remaining one.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: the repo roots /root/a and /root/b, plus /root/b/script,
    // which lives inside b's repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two distinct repositories exist: the script worktree shares b's.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the script worktree must not remove b's repository, because the
    // /root/b worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree a should switch the active repository over to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every worktree gone, there is no active repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11820
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies that staging a file optimistically marks its hunks as pending
    // (SecondaryHunkRemovalPending) before the git operation completes, then
    // settles to fully staged once it does.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // Job hasn't started yet — keep ticking.
            HasSecondaryHunk => {}
            // The optimistic pending state was observed — stop ticking.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11965
11966#[gpui::test]
11967async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11968 init_test(cx);
11969
11970 // Configure read_only_files setting
11971 cx.update(|cx| {
11972 cx.update_global::<SettingsStore, _>(|store, cx| {
11973 store.update_user_settings(cx, |settings| {
11974 settings.project.worktree.read_only_files = Some(vec![
11975 "**/generated/**".to_string(),
11976 "**/*.gen.rs".to_string(),
11977 ]);
11978 });
11979 });
11980 });
11981
11982 let fs = FakeFs::new(cx.background_executor.clone());
11983 fs.insert_tree(
11984 path!("/root"),
11985 json!({
11986 "src": {
11987 "main.rs": "fn main() {}",
11988 "types.gen.rs": "// Generated file",
11989 },
11990 "generated": {
11991 "schema.rs": "// Auto-generated schema",
11992 }
11993 }),
11994 )
11995 .await;
11996
11997 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11998
11999 // Open a regular file - should be read-write
12000 let regular_buffer = project
12001 .update(cx, |project, cx| {
12002 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12003 })
12004 .await
12005 .unwrap();
12006
12007 regular_buffer.read_with(cx, |buffer, _| {
12008 assert!(!buffer.read_only(), "Regular file should not be read-only");
12009 });
12010
12011 // Open a file matching *.gen.rs pattern - should be read-only
12012 let gen_buffer = project
12013 .update(cx, |project, cx| {
12014 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12015 })
12016 .await
12017 .unwrap();
12018
12019 gen_buffer.read_with(cx, |buffer, _| {
12020 assert!(
12021 buffer.read_only(),
12022 "File matching *.gen.rs pattern should be read-only"
12023 );
12024 });
12025
12026 // Open a file in generated directory - should be read-only
12027 let generated_buffer = project
12028 .update(cx, |project, cx| {
12029 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12030 })
12031 .await
12032 .unwrap();
12033
12034 generated_buffer.read_with(cx, |buffer, _| {
12035 assert!(
12036 buffer.read_only(),
12037 "File in generated directory should be read-only"
12038 );
12039 });
12040}
12041
12042#[gpui::test]
12043async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12044 init_test(cx);
12045
12046 // Explicitly set read_only_files to empty (default behavior)
12047 cx.update(|cx| {
12048 cx.update_global::<SettingsStore, _>(|store, cx| {
12049 store.update_user_settings(cx, |settings| {
12050 settings.project.worktree.read_only_files = Some(vec![]);
12051 });
12052 });
12053 });
12054
12055 let fs = FakeFs::new(cx.background_executor.clone());
12056 fs.insert_tree(
12057 path!("/root"),
12058 json!({
12059 "src": {
12060 "main.rs": "fn main() {}",
12061 },
12062 "generated": {
12063 "schema.rs": "// Auto-generated schema",
12064 }
12065 }),
12066 )
12067 .await;
12068
12069 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12070
12071 // All files should be read-write when read_only_files is empty
12072 let main_buffer = project
12073 .update(cx, |project, cx| {
12074 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12075 })
12076 .await
12077 .unwrap();
12078
12079 main_buffer.read_with(cx, |buffer, _| {
12080 assert!(
12081 !buffer.read_only(),
12082 "Files should not be read-only when read_only_files is empty"
12083 );
12084 });
12085
12086 let generated_buffer = project
12087 .update(cx, |project, cx| {
12088 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12089 })
12090 .await
12091 .unwrap();
12092
12093 generated_buffer.read_with(cx, |buffer, _| {
12094 assert!(
12095 !buffer.read_only(),
12096 "Generated files should not be read-only when read_only_files is empty"
12097 );
12098 });
12099}
12100
12101#[gpui::test]
12102async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12103 init_test(cx);
12104
12105 // Configure to make lock files read-only
12106 cx.update(|cx| {
12107 cx.update_global::<SettingsStore, _>(|store, cx| {
12108 store.update_user_settings(cx, |settings| {
12109 settings.project.worktree.read_only_files = Some(vec![
12110 "**/*.lock".to_string(),
12111 "**/package-lock.json".to_string(),
12112 ]);
12113 });
12114 });
12115 });
12116
12117 let fs = FakeFs::new(cx.background_executor.clone());
12118 fs.insert_tree(
12119 path!("/root"),
12120 json!({
12121 "Cargo.lock": "# Lock file",
12122 "Cargo.toml": "[package]",
12123 "package-lock.json": "{}",
12124 "package.json": "{}",
12125 }),
12126 )
12127 .await;
12128
12129 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12130
12131 // Cargo.lock should be read-only
12132 let cargo_lock = project
12133 .update(cx, |project, cx| {
12134 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12135 })
12136 .await
12137 .unwrap();
12138
12139 cargo_lock.read_with(cx, |buffer, _| {
12140 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12141 });
12142
12143 // Cargo.toml should be read-write
12144 let cargo_toml = project
12145 .update(cx, |project, cx| {
12146 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12147 })
12148 .await
12149 .unwrap();
12150
12151 cargo_toml.read_with(cx, |buffer, _| {
12152 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12153 });
12154
12155 // package-lock.json should be read-only
12156 let package_lock = project
12157 .update(cx, |project, cx| {
12158 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12159 })
12160 .await
12161 .unwrap();
12162
12163 package_lock.read_with(cx, |buffer, _| {
12164 assert!(buffer.read_only(), "package-lock.json should be read-only");
12165 });
12166
12167 // package.json should be read-write
12168 let package_json = project
12169 .update(cx, |project, cx| {
12170 project.open_local_buffer(path!("/root/package.json"), cx)
12171 })
12172 .await
12173 .unwrap();
12174
12175 package_json.read_with(cx, |buffer, _| {
12176 assert!(!buffer.read_only(), "package.json should not be read-only");
12177 });
12178}
12179
// Tests for the `disable_ai` setting's "saturating" merge semantics: once any
// settings layer sets it to true, lower-precedence layers cannot re-enable AI.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Global settings set true; user settings set false — true must win.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Swap which layer holds true; any true layer still disables AI.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Synthesize a settings location inside a fake worktree/project folder.
        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}