1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettings, LanguageSettingsContent, all_language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that the worktree scanner follows symlinks: a project opened
    // through a directory symlink is scanned, and a symlinked subdirectory's
    // entries resolve to the same inodes as the target directory's entries.
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so parking is required.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` -> `root`, and `root/finnochio` -> `root/fennel`.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root directory.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape, and grape again via the finnochio link.
        assert_eq!(tree.file_count(), 5);
        // The file reached through the symlinked directory shares an inode
        // with the file in the link's target directory.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
179
180#[gpui::test]
181async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
182 init_test(cx);
183
184 let dir = TempTree::new(json!({
185 ".editorconfig": r#"
186 root = true
187 [*.rs]
188 indent_style = tab
189 indent_size = 3
190 end_of_line = lf
191 insert_final_newline = true
192 trim_trailing_whitespace = true
193 max_line_length = 120
194 [*.js]
195 tab_width = 10
196 max_line_length = off
197 "#,
198 ".zed": {
199 "settings.json": r#"{
200 "tab_size": 8,
201 "hard_tabs": false,
202 "ensure_final_newline_on_save": false,
203 "remove_trailing_whitespace_on_save": false,
204 "preferred_line_length": 64,
205 "soft_wrap": "editor_width",
206 }"#,
207 },
208 "a.rs": "fn a() {\n A\n}",
209 "b": {
210 ".editorconfig": r#"
211 [*.rs]
212 indent_size = 2
213 max_line_length = off,
214 "#,
215 "b.rs": "fn b() {\n B\n}",
216 },
217 "c.js": "def c\n C\nend",
218 "d": {
219 ".editorconfig": r#"
220 [*.rs]
221 indent_size = 1
222 "#,
223 "d.rs": "fn d() {\n D\n}",
224 },
225 "README.json": "tabs are better\n",
226 }));
227
228 let path = dir.path();
229 let fs = FakeFs::new(cx.executor());
230 fs.insert_tree_from_real_fs(path, path).await;
231 let project = Project::test(fs, [path], cx).await;
232
233 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
234 language_registry.add(js_lang());
235 language_registry.add(json_lang());
236 language_registry.add(rust_lang());
237
238 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
239
240 cx.executor().run_until_parked();
241
242 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
243 let buffer = project
244 .update(cx, |project, cx| {
245 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
246 })
247 .await
248 .unwrap();
249 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
250 };
251
252 let settings_a = settings_for("a.rs", cx).await;
253 let settings_b = settings_for("b/b.rs", cx).await;
254 let settings_c = settings_for("c.js", cx).await;
255 let settings_d = settings_for("d/d.rs", cx).await;
256 let settings_readme = settings_for("README.json", cx).await;
257 // .editorconfig overrides .zed/settings
258 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
259 assert_eq!(settings_a.hard_tabs, true);
260 assert_eq!(settings_a.ensure_final_newline_on_save, true);
261 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
262 assert_eq!(settings_a.preferred_line_length, 120);
263
264 // .editorconfig in b/ overrides .editorconfig in root
265 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
266
267 // "indent_size" is not set, so "tab_width" is used
268 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
269 // .editorconfig in subdirectory overrides .editorconfig in root
270 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
271
272 // When max_line_length is "off", default to .zed/settings.json
273 assert_eq!(settings_b.preferred_line_length, 64);
274 assert_eq!(settings_c.preferred_line_length, 64);
275
276 // README.md should not be affected by .editorconfig's globe "*.rs"
277 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
278}
279
280#[gpui::test]
281async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
282 init_test(cx);
283
284 let fs = FakeFs::new(cx.executor());
285 fs.insert_tree(
286 path!("/grandparent"),
287 json!({
288 ".editorconfig": "[*]\nindent_size = 4\n",
289 "parent": {
290 ".editorconfig": "[*.rs]\nindent_size = 2\n",
291 "worktree": {
292 ".editorconfig": "[*.md]\nindent_size = 3\n",
293 "main.rs": "fn main() {}",
294 "README.md": "# README",
295 "other.txt": "other content",
296 }
297 }
298 }),
299 )
300 .await;
301
302 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
303
304 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
305 language_registry.add(rust_lang());
306 language_registry.add(markdown_lang());
307
308 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
309
310 cx.executor().run_until_parked();
311 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
312 let buffer = project
313 .update(cx, |project, cx| {
314 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
315 })
316 .await
317 .unwrap();
318 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
319 };
320
321 let settings_rs = settings_for("main.rs", cx).await;
322 let settings_md = settings_for("README.md", cx).await;
323 let settings_txt = settings_for("other.txt", cx).await;
324
325 // main.rs gets indent_size = 2 from parent's external .editorconfig
326 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
327
328 // README.md gets indent_size = 3 from internal worktree .editorconfig
329 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
330
331 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
332 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
333}
334
// Verifies that an `.editorconfig` marked `root = true` inside the worktree
// stops upward traversal: `src/file.rs` gets indent_size = 2 from `src/`,
// never 99 from the worktree root's config.
#[gpui::test]
async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "src": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree
            .entry_for_path(rel_path("src/file.rs"))
            .unwrap()
            .clone();
        let file = File::for_entry(file_entry, worktree.clone());
        // Resolve the language from the file path alone (no buffer is opened).
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        // Query settings through `all_language_settings` rather than a buffer.
        let settings = all_language_settings(Some(&file), cx)
            .language(None, Some(&file_language.name()), cx)
            .into_owned();

        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
384
// Verifies that when the worktree's own `.editorconfig` declares `root = true`,
// the parent directory's external config (indent_size = 99) is never consulted.
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
425
// Verifies that a `root = true` config in an ancestor directory OUTSIDE the
// worktree stops traversal there: the grandparent's config is never consulted.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
468
// Verifies that two sibling worktrees both inherit settings from a single
// external `.editorconfig` in their shared parent directory.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
524
// Verifies that external editorconfig discovery is only triggered when the
// worktree itself contains an `.editorconfig`; otherwise ancestor configs
// are ignored entirely and defaults apply.
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Note: the worktree deliberately has NO .editorconfig of its own.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
567
// Verifies that editing an external `.editorconfig` (outside the worktree) is
// picked up by the watcher and refreshes the settings of open projects.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree has an (empty-section) internal .editorconfig so that
    // external config discovery is active.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should notice.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
631
// Verifies that a worktree added to an existing project also discovers the
// external `.editorconfig` in its parent directory, just like worktrees
// present at project creation.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only the first directory as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the sibling directory as a second worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
698
// Verifies that removing a worktree drops its entry from the editorconfig
// store and releases the external config and its file watcher.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
754
// Verifies reference-counted cleanup of a shared external config: removing one
// of two worktrees keeps the config and watcher alive for the survivor, and
// the survivor's settings remain correct.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
847
// Verifies that the project-local `git_hosting_providers` setting registers a
// custom provider in the global registry, and that removing the setting from
// `.zed/settings.json` unregisters it again.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one custom GitLab-style provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from project settings should now be registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings file on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The custom provider should have been unregistered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
912
// End-to-end check of project-local settings and tasks: per-directory
// `.zed/settings.json` overrides, task discovery from nested `.zed/tasks.json`
// files, reordering after a task is scheduled, and merging of global tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root `.zed/` and nested `b/.zed/` each provide settings and one task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree with a default task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree root's `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    // Each buffer picks up the nearest `.zed/settings.json` tab_size.
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    // Collect all resolved tasks as (source, label, args, env) tuples.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, and register a global tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task now sorts first; the global task is appended last,
    // carrying its env from the global tasks.json.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1115
// Verifies that writing an invalid `.zed/tasks.json` (an unknown `$ZED_*`
// variable) emits an `Event::Toast` whose link points at the tasks docs.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Record the toast and check its id, message, and docs link.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1173
1174#[gpui::test]
1175async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1176 init_test(cx);
1177 TaskStore::init(None);
1178
1179 let fs = FakeFs::new(cx.executor());
1180 fs.insert_tree(
1181 path!("/dir"),
1182 json!({
1183 ".zed": {
1184 "tasks.json": r#"[{
1185 "label": "test worktree root",
1186 "command": "echo $ZED_WORKTREE_ROOT"
1187 }]"#,
1188 },
1189 "a": {
1190 "a.rs": "fn a() {\n A\n}"
1191 },
1192 }),
1193 )
1194 .await;
1195
1196 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1197 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1198
1199 cx.executor().run_until_parked();
1200 let worktree_id = cx.update(|cx| {
1201 project.update(cx, |project, cx| {
1202 project.worktrees(cx).next().unwrap().read(cx).id()
1203 })
1204 });
1205
1206 let active_non_worktree_item_tasks = cx
1207 .update(|cx| {
1208 get_all_tasks(
1209 &project,
1210 Arc::new(TaskContexts {
1211 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1212 active_worktree_context: None,
1213 other_worktree_contexts: Vec::new(),
1214 lsp_task_sources: HashMap::default(),
1215 latest_selection: None,
1216 }),
1217 cx,
1218 )
1219 })
1220 .await;
1221 assert!(
1222 active_non_worktree_item_tasks.is_empty(),
1223 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1224 );
1225
1226 let active_worktree_tasks = cx
1227 .update(|cx| {
1228 get_all_tasks(
1229 &project,
1230 Arc::new(TaskContexts {
1231 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1232 active_worktree_context: Some((worktree_id, {
1233 let mut worktree_context = TaskContext::default();
1234 worktree_context
1235 .task_variables
1236 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1237 worktree_context
1238 })),
1239 other_worktree_contexts: Vec::new(),
1240 lsp_task_sources: HashMap::default(),
1241 latest_selection: None,
1242 }),
1243 cx,
1244 )
1245 })
1246 .await;
1247 assert_eq!(
1248 active_worktree_tasks
1249 .into_iter()
1250 .map(|(source_kind, task)| {
1251 let resolved = task.resolved;
1252 (source_kind, resolved.command.unwrap())
1253 })
1254 .collect::<Vec<_>>(),
1255 vec![(
1256 TaskSourceKind::Worktree {
1257 id: worktree_id,
1258 directory_in_worktree: rel_path(".zed").into(),
1259 id_base: "local worktree tasks from directory \".zed\"".into(),
1260 },
1261 "echo /dir".to_string(),
1262 )]
1263 );
1264}
1265
// Verifies that a single language-server adapter ("ty") can back multiple
// server instances within one worktree: buffers from two subprojects initially
// share one server (same rooting), and activating a distinct toolchain for one
// subproject causes a new server instance to be spawned for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Roots a Python project at the nearest ancestor directory containing a
    // `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors, returning the first directory
            // that contains a `pyproject.toml`.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects under one worktree root, each with its own
    // `pyproject.toml` (and an empty `.venv` directory).
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no per-project toolchain has been selected yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b (its `pyproject.toml`).
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Re-query the servers for the project-b buffer after toolchain activation.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1467
// End-to-end test of language-server lifecycle management: server startup on
// buffer open, capability-based buffer configuration, change/save/close
// notifications routed to the right server, language reassignment on file
// rename (including diagnostics being cleared and document version resetting),
// and document re-opening after a server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Both fake servers advertise save support and completion trigger
    // characters, so we can verify the buffers get configured from the
    // capabilities of the server that matches their language.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's change reaches the rust server; the TOML edit is
    // not reported to any server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is reported as a close of the old URI followed by
    // an open of the new one, on the same (rust) server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify it is cleared when the
    // buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Answer the Shutdown requests so both restarts can complete.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (the two open notifications may arrive in either order).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1870
// Verifies how language-server binary paths from settings are resolved:
// a path that resolves inside the worktree is treated as worktree-relative,
// while one that doesn't is passed through unchanged (to be looked up via
// the PATH environment variable).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    // NOTE(review): the tree below creates `.relative_path/to/my_fake_lsp.exe`,
    // but the settings above reference `my_fake_lsp_binary.exe`. The "file
    // exists" comment may actually hinge on the *directory* existing — confirm
    // the intended resolution rule and align the two names if not.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-resolvable path is left untouched for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
1954
1955#[gpui::test]
1956async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1957 init_test(cx);
1958
1959 let settings_json_contents = json!({
1960 "languages": {
1961 "Rust": {
1962 "language_servers": ["tilde_lsp"]
1963 }
1964 },
1965 "lsp": {
1966 "tilde_lsp": {
1967 "binary": {
1968 "path": "~/.local/bin/rust-analyzer",
1969 }
1970 }
1971 },
1972 });
1973
1974 let fs = FakeFs::new(cx.executor());
1975 fs.insert_tree(
1976 path!("/root"),
1977 json!({
1978 ".zed": {
1979 "settings.json": settings_json_contents.to_string(),
1980 },
1981 "src": {
1982 "main.rs": "fn main() {}",
1983 }
1984 }),
1985 )
1986 .await;
1987
1988 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1989 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1990 language_registry.add(rust_lang());
1991
1992 let mut tilde_lsp = language_registry.register_fake_lsp(
1993 "Rust",
1994 FakeLspAdapter {
1995 name: "tilde_lsp",
1996 ..Default::default()
1997 },
1998 );
1999 cx.run_until_parked();
2000
2001 project
2002 .update(cx, |project, cx| {
2003 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2004 })
2005 .await
2006 .unwrap();
2007
2008 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2009 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2010 assert_eq!(
2011 lsp_path, expected_path,
2012 "Tilde path should expand to home directory"
2013 );
2014}
2015
2016#[gpui::test]
2017async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2018 init_test(cx);
2019
2020 let fs = FakeFs::new(cx.executor());
2021 fs.insert_tree(
2022 path!("/the-root"),
2023 json!({
2024 ".gitignore": "target\n",
2025 "Cargo.lock": "",
2026 "src": {
2027 "a.rs": "",
2028 "b.rs": "",
2029 },
2030 "target": {
2031 "x": {
2032 "out": {
2033 "x.rs": ""
2034 }
2035 },
2036 "y": {
2037 "out": {
2038 "y.rs": "",
2039 }
2040 },
2041 "z": {
2042 "out": {
2043 "z.rs": ""
2044 }
2045 }
2046 }
2047 }),
2048 )
2049 .await;
2050 fs.insert_tree(
2051 path!("/the-registry"),
2052 json!({
2053 "dep1": {
2054 "src": {
2055 "dep1.rs": "",
2056 }
2057 },
2058 "dep2": {
2059 "src": {
2060 "dep2.rs": "",
2061 }
2062 },
2063 }),
2064 )
2065 .await;
2066 fs.insert_tree(
2067 path!("/the/stdlib"),
2068 json!({
2069 "LICENSE": "",
2070 "src": {
2071 "string.rs": "",
2072 }
2073 }),
2074 )
2075 .await;
2076
2077 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2078 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2079 (project.languages().clone(), project.lsp_store())
2080 });
2081 language_registry.add(rust_lang());
2082 let mut fake_servers = language_registry.register_fake_lsp(
2083 "Rust",
2084 FakeLspAdapter {
2085 name: "the-language-server",
2086 ..Default::default()
2087 },
2088 );
2089
2090 cx.executor().run_until_parked();
2091
2092 // Start the language server by opening a buffer with a compatible file extension.
2093 project
2094 .update(cx, |project, cx| {
2095 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2096 })
2097 .await
2098 .unwrap();
2099
2100 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2101 project.update(cx, |project, cx| {
2102 let worktree = project.worktrees(cx).next().unwrap();
2103 assert_eq!(
2104 worktree
2105 .read(cx)
2106 .snapshot()
2107 .entries(true, 0)
2108 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2109 .collect::<Vec<_>>(),
2110 &[
2111 ("", false),
2112 (".gitignore", false),
2113 ("Cargo.lock", false),
2114 ("src", false),
2115 ("src/a.rs", false),
2116 ("src/b.rs", false),
2117 ("target", true),
2118 ]
2119 );
2120 });
2121
2122 let prev_read_dir_count = fs.read_dir_call_count();
2123
2124 let fake_server = fake_servers.next().await.unwrap();
2125 cx.executor().run_until_parked();
2126 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2127 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2128 id
2129 });
2130
2131 // Simulate jumping to a definition in a dependency outside of the worktree.
2132 let _out_of_worktree_buffer = project
2133 .update(cx, |project, cx| {
2134 project.open_local_buffer_via_lsp(
2135 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2136 server_id,
2137 cx,
2138 )
2139 })
2140 .await
2141 .unwrap();
2142
2143 // Keep track of the FS events reported to the language server.
2144 let file_changes = Arc::new(Mutex::new(Vec::new()));
2145 fake_server
2146 .request::<lsp::request::RegisterCapability>(
2147 lsp::RegistrationParams {
2148 registrations: vec![lsp::Registration {
2149 id: Default::default(),
2150 method: "workspace/didChangeWatchedFiles".to_string(),
2151 register_options: serde_json::to_value(
2152 lsp::DidChangeWatchedFilesRegistrationOptions {
2153 watchers: vec![
2154 lsp::FileSystemWatcher {
2155 glob_pattern: lsp::GlobPattern::String(
2156 path!("/the-root/Cargo.toml").to_string(),
2157 ),
2158 kind: None,
2159 },
2160 lsp::FileSystemWatcher {
2161 glob_pattern: lsp::GlobPattern::String(
2162 path!("/the-root/src/*.{rs,c}").to_string(),
2163 ),
2164 kind: None,
2165 },
2166 lsp::FileSystemWatcher {
2167 glob_pattern: lsp::GlobPattern::String(
2168 path!("/the-root/target/y/**/*.rs").to_string(),
2169 ),
2170 kind: None,
2171 },
2172 lsp::FileSystemWatcher {
2173 glob_pattern: lsp::GlobPattern::String(
2174 path!("/the/stdlib/src/**/*.rs").to_string(),
2175 ),
2176 kind: None,
2177 },
2178 lsp::FileSystemWatcher {
2179 glob_pattern: lsp::GlobPattern::String(
2180 path!("**/Cargo.lock").to_string(),
2181 ),
2182 kind: None,
2183 },
2184 ],
2185 },
2186 )
2187 .ok(),
2188 }],
2189 },
2190 DEFAULT_LSP_REQUEST_TIMEOUT,
2191 )
2192 .await
2193 .into_response()
2194 .unwrap();
2195 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2196 let file_changes = file_changes.clone();
2197 move |params, _| {
2198 let mut file_changes = file_changes.lock();
2199 file_changes.extend(params.changes);
2200 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2201 }
2202 });
2203
2204 cx.executor().run_until_parked();
2205 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2206 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2207
2208 let mut new_watched_paths = fs.watched_paths();
2209 new_watched_paths.retain(|path| {
2210 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2211 });
2212 assert_eq!(
2213 &new_watched_paths,
2214 &[
2215 Path::new(path!("/the-root")),
2216 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2217 Path::new(path!("/the/stdlib/src"))
2218 ]
2219 );
2220
2221 // Now the language server has asked us to watch an ignored directory path,
2222 // so we recursively load it.
2223 project.update(cx, |project, cx| {
2224 let worktree = project.visible_worktrees(cx).next().unwrap();
2225 assert_eq!(
2226 worktree
2227 .read(cx)
2228 .snapshot()
2229 .entries(true, 0)
2230 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2231 .collect::<Vec<_>>(),
2232 &[
2233 ("", false),
2234 (".gitignore", false),
2235 ("Cargo.lock", false),
2236 ("src", false),
2237 ("src/a.rs", false),
2238 ("src/b.rs", false),
2239 ("target", true),
2240 ("target/x", true),
2241 ("target/y", true),
2242 ("target/y/out", true),
2243 ("target/y/out/y.rs", true),
2244 ("target/z", true),
2245 ]
2246 );
2247 });
2248
2249 // Perform some file system mutations, two of which match the watched patterns,
2250 // and one of which does not.
2251 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2252 .await
2253 .unwrap();
2254 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2255 .await
2256 .unwrap();
2257 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2258 .await
2259 .unwrap();
2260 fs.create_file(
2261 path!("/the-root/target/x/out/x2.rs").as_ref(),
2262 Default::default(),
2263 )
2264 .await
2265 .unwrap();
2266 fs.create_file(
2267 path!("/the-root/target/y/out/y2.rs").as_ref(),
2268 Default::default(),
2269 )
2270 .await
2271 .unwrap();
2272 fs.save(
2273 path!("/the-root/Cargo.lock").as_ref(),
2274 &"".into(),
2275 Default::default(),
2276 )
2277 .await
2278 .unwrap();
2279 fs.save(
2280 path!("/the-stdlib/LICENSE").as_ref(),
2281 &"".into(),
2282 Default::default(),
2283 )
2284 .await
2285 .unwrap();
2286 fs.save(
2287 path!("/the/stdlib/src/string.rs").as_ref(),
2288 &"".into(),
2289 Default::default(),
2290 )
2291 .await
2292 .unwrap();
2293
2294 // The language server receives events for the FS mutations that match its watch patterns.
2295 cx.executor().run_until_parked();
2296 assert_eq!(
2297 &*file_changes.lock(),
2298 &[
2299 lsp::FileEvent {
2300 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2301 typ: lsp::FileChangeType::CHANGED,
2302 },
2303 lsp::FileEvent {
2304 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2305 typ: lsp::FileChangeType::DELETED,
2306 },
2307 lsp::FileEvent {
2308 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2309 typ: lsp::FileChangeType::CREATED,
2310 },
2311 lsp::FileEvent {
2312 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2313 typ: lsp::FileChangeType::CREATED,
2314 },
2315 lsp::FileEvent {
2316 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2317 typ: lsp::FileChangeType::CHANGED,
2318 },
2319 ]
2320 );
2321}
2322
2323#[gpui::test]
2324async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2325 init_test(cx);
2326
2327 let fs = FakeFs::new(cx.executor());
2328 fs.insert_tree(
2329 path!("/dir"),
2330 json!({
2331 "a.rs": "let a = 1;",
2332 "b.rs": "let b = 2;"
2333 }),
2334 )
2335 .await;
2336
2337 let project = Project::test(
2338 fs,
2339 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2340 cx,
2341 )
2342 .await;
2343 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2344
2345 let buffer_a = project
2346 .update(cx, |project, cx| {
2347 project.open_local_buffer(path!("/dir/a.rs"), cx)
2348 })
2349 .await
2350 .unwrap();
2351 let buffer_b = project
2352 .update(cx, |project, cx| {
2353 project.open_local_buffer(path!("/dir/b.rs"), cx)
2354 })
2355 .await
2356 .unwrap();
2357
2358 lsp_store.update(cx, |lsp_store, cx| {
2359 lsp_store
2360 .update_diagnostics(
2361 LanguageServerId(0),
2362 lsp::PublishDiagnosticsParams {
2363 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2364 version: None,
2365 diagnostics: vec![lsp::Diagnostic {
2366 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2367 severity: Some(lsp::DiagnosticSeverity::ERROR),
2368 message: "error 1".to_string(),
2369 ..Default::default()
2370 }],
2371 },
2372 None,
2373 DiagnosticSourceKind::Pushed,
2374 &[],
2375 cx,
2376 )
2377 .unwrap();
2378 lsp_store
2379 .update_diagnostics(
2380 LanguageServerId(0),
2381 lsp::PublishDiagnosticsParams {
2382 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2383 version: None,
2384 diagnostics: vec![lsp::Diagnostic {
2385 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2386 severity: Some(DiagnosticSeverity::WARNING),
2387 message: "error 2".to_string(),
2388 ..Default::default()
2389 }],
2390 },
2391 None,
2392 DiagnosticSourceKind::Pushed,
2393 &[],
2394 cx,
2395 )
2396 .unwrap();
2397 });
2398
2399 buffer_a.update(cx, |buffer, _| {
2400 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2401 assert_eq!(
2402 chunks
2403 .iter()
2404 .map(|(s, d)| (s.as_str(), *d))
2405 .collect::<Vec<_>>(),
2406 &[
2407 ("let ", None),
2408 ("a", Some(DiagnosticSeverity::ERROR)),
2409 (" = 1;", None),
2410 ]
2411 );
2412 });
2413 buffer_b.update(cx, |buffer, _| {
2414 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2415 assert_eq!(
2416 chunks
2417 .iter()
2418 .map(|(s, d)| (s.as_str(), *d))
2419 .collect::<Vec<_>>(),
2420 &[
2421 ("let ", None),
2422 ("b", Some(DiagnosticSeverity::WARNING)),
2423 (" = 2;", None),
2424 ]
2425 );
2426 });
2427}
2428
2429#[gpui::test]
2430async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2431 init_test(cx);
2432
2433 let fs = FakeFs::new(cx.executor());
2434 fs.insert_tree(
2435 path!("/root"),
2436 json!({
2437 "dir": {
2438 ".git": {
2439 "HEAD": "ref: refs/heads/main",
2440 },
2441 ".gitignore": "b.rs",
2442 "a.rs": "let a = 1;",
2443 "b.rs": "let b = 2;",
2444 },
2445 "other.rs": "let b = c;"
2446 }),
2447 )
2448 .await;
2449
2450 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2451 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2452 let (worktree, _) = project
2453 .update(cx, |project, cx| {
2454 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2455 })
2456 .await
2457 .unwrap();
2458 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2459
2460 let (worktree, _) = project
2461 .update(cx, |project, cx| {
2462 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2463 })
2464 .await
2465 .unwrap();
2466 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2467
2468 let server_id = LanguageServerId(0);
2469 lsp_store.update(cx, |lsp_store, cx| {
2470 lsp_store
2471 .update_diagnostics(
2472 server_id,
2473 lsp::PublishDiagnosticsParams {
2474 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2475 version: None,
2476 diagnostics: vec![lsp::Diagnostic {
2477 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2478 severity: Some(lsp::DiagnosticSeverity::ERROR),
2479 message: "unused variable 'b'".to_string(),
2480 ..Default::default()
2481 }],
2482 },
2483 None,
2484 DiagnosticSourceKind::Pushed,
2485 &[],
2486 cx,
2487 )
2488 .unwrap();
2489 lsp_store
2490 .update_diagnostics(
2491 server_id,
2492 lsp::PublishDiagnosticsParams {
2493 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2494 version: None,
2495 diagnostics: vec![lsp::Diagnostic {
2496 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2497 severity: Some(lsp::DiagnosticSeverity::ERROR),
2498 message: "unknown variable 'c'".to_string(),
2499 ..Default::default()
2500 }],
2501 },
2502 None,
2503 DiagnosticSourceKind::Pushed,
2504 &[],
2505 cx,
2506 )
2507 .unwrap();
2508 });
2509
2510 let main_ignored_buffer = project
2511 .update(cx, |project, cx| {
2512 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2513 })
2514 .await
2515 .unwrap();
2516 main_ignored_buffer.update(cx, |buffer, _| {
2517 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2518 assert_eq!(
2519 chunks
2520 .iter()
2521 .map(|(s, d)| (s.as_str(), *d))
2522 .collect::<Vec<_>>(),
2523 &[
2524 ("let ", None),
2525 ("b", Some(DiagnosticSeverity::ERROR)),
2526 (" = 2;", None),
2527 ],
2528 "Gigitnored buffers should still get in-buffer diagnostics",
2529 );
2530 });
2531 let other_buffer = project
2532 .update(cx, |project, cx| {
2533 project.open_buffer((other_worktree_id, rel_path("")), cx)
2534 })
2535 .await
2536 .unwrap();
2537 other_buffer.update(cx, |buffer, _| {
2538 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2539 assert_eq!(
2540 chunks
2541 .iter()
2542 .map(|(s, d)| (s.as_str(), *d))
2543 .collect::<Vec<_>>(),
2544 &[
2545 ("let b = ", None),
2546 ("c", Some(DiagnosticSeverity::ERROR)),
2547 (";", None),
2548 ],
2549 "Buffers from hidden projects should still get in-buffer diagnostics"
2550 );
2551 });
2552
2553 project.update(cx, |project, cx| {
2554 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2555 assert_eq!(
2556 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2557 vec![(
2558 ProjectPath {
2559 worktree_id: main_worktree_id,
2560 path: rel_path("b.rs").into(),
2561 },
2562 server_id,
2563 DiagnosticSummary {
2564 error_count: 1,
2565 warning_count: 0,
2566 }
2567 )]
2568 );
2569 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2570 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2571 });
2572}
2573
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that progress reported under the adapter's disk-based
    // diagnostics token is surfaced as ordered project events:
    // started -> diagnostics updated -> finished — and that publishing the
    // same empty diagnostics twice yields only one update event.
    init_test(cx);

    // Token the fake server uses to report disk-based diagnostics progress.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress whose token is prefixed by the disk-based token triggers the
    // "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic mid-progress emits a DiagnosticsUpdated event
    // for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress emits the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Opening the buffer afterwards should still surface the pushed diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2709
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics work is still in progress doesn't leave the project stuck
    // in a "diagnostics running" state: the replacement server's progress
    // lifecycle supersedes the old server's unfinished one.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2811
2812#[gpui::test]
2813async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2814 init_test(cx);
2815
2816 let fs = FakeFs::new(cx.executor());
2817 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2818
2819 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2820
2821 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2822 language_registry.add(rust_lang());
2823 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2824
2825 let (buffer, _) = project
2826 .update(cx, |project, cx| {
2827 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2828 })
2829 .await
2830 .unwrap();
2831
2832 // Publish diagnostics
2833 let fake_server = fake_servers.next().await.unwrap();
2834 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2835 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2836 version: None,
2837 diagnostics: vec![lsp::Diagnostic {
2838 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2839 severity: Some(lsp::DiagnosticSeverity::ERROR),
2840 message: "the message".to_string(),
2841 ..Default::default()
2842 }],
2843 });
2844
2845 cx.executor().run_until_parked();
2846 buffer.update(cx, |buffer, _| {
2847 assert_eq!(
2848 buffer
2849 .snapshot()
2850 .diagnostics_in_range::<_, usize>(0..1, false)
2851 .map(|entry| entry.diagnostic.message.clone())
2852 .collect::<Vec<_>>(),
2853 ["the message".to_string()]
2854 );
2855 });
2856 project.update(cx, |project, cx| {
2857 assert_eq!(
2858 project.diagnostic_summary(false, cx),
2859 DiagnosticSummary {
2860 error_count: 1,
2861 warning_count: 0,
2862 }
2863 );
2864 });
2865
2866 project.update(cx, |project, cx| {
2867 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2868 });
2869
2870 // The diagnostics are cleared.
2871 cx.executor().run_until_parked();
2872 buffer.update(cx, |buffer, _| {
2873 assert_eq!(
2874 buffer
2875 .snapshot()
2876 .diagnostics_in_range::<_, usize>(0..1, false)
2877 .map(|entry| entry.diagnostic.message.clone())
2878 .collect::<Vec<_>>(),
2879 Vec::<String>::new(),
2880 );
2881 });
2882 project.update(cx, |project, cx| {
2883 assert_eq!(
2884 project.diagnostic_summary(false, cx),
2885 DiagnosticSummary {
2886 error_count: 0,
2887 warning_count: 0,
2888 }
2889 );
2890 });
2891}
2892
2893#[gpui::test]
2894async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2895 init_test(cx);
2896
2897 let fs = FakeFs::new(cx.executor());
2898 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2899
2900 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2901 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2902
2903 language_registry.add(rust_lang());
2904 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2905
2906 let (buffer, _handle) = project
2907 .update(cx, |project, cx| {
2908 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2909 })
2910 .await
2911 .unwrap();
2912
2913 // Before restarting the server, report diagnostics with an unknown buffer version.
2914 let fake_server = fake_servers.next().await.unwrap();
2915 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2916 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2917 version: Some(10000),
2918 diagnostics: Vec::new(),
2919 });
2920 cx.executor().run_until_parked();
2921 project.update(cx, |project, cx| {
2922 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2923 });
2924
2925 let mut fake_server = fake_servers.next().await.unwrap();
2926 let notification = fake_server
2927 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2928 .await
2929 .text_document;
2930 assert_eq!(notification.version, 0);
2931}
2932
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // `window/workDoneProgress/cancel` notification only for progress tokens
    // the server marked as cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start one piece of work that is explicitly NOT cancellable...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // ...and one that is cancellable.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token should have been cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3004
3005#[gpui::test]
3006async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
3007 init_test(cx);
3008
3009 let fs = FakeFs::new(cx.executor());
3010 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
3011 .await;
3012
3013 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3014 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3015
3016 let mut fake_rust_servers = language_registry.register_fake_lsp(
3017 "Rust",
3018 FakeLspAdapter {
3019 name: "rust-lsp",
3020 ..Default::default()
3021 },
3022 );
3023 let mut fake_js_servers = language_registry.register_fake_lsp(
3024 "JavaScript",
3025 FakeLspAdapter {
3026 name: "js-lsp",
3027 ..Default::default()
3028 },
3029 );
3030 language_registry.add(rust_lang());
3031 language_registry.add(js_lang());
3032
3033 let _rs_buffer = project
3034 .update(cx, |project, cx| {
3035 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3036 })
3037 .await
3038 .unwrap();
3039 let _js_buffer = project
3040 .update(cx, |project, cx| {
3041 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
3042 })
3043 .await
3044 .unwrap();
3045
3046 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
3047 assert_eq!(
3048 fake_rust_server_1
3049 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3050 .await
3051 .text_document
3052 .uri
3053 .as_str(),
3054 uri!("file:///dir/a.rs")
3055 );
3056
3057 let mut fake_js_server = fake_js_servers.next().await.unwrap();
3058 assert_eq!(
3059 fake_js_server
3060 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3061 .await
3062 .text_document
3063 .uri
3064 .as_str(),
3065 uri!("file:///dir/b.js")
3066 );
3067
3068 // Disable Rust language server, ensuring only that server gets stopped.
3069 cx.update(|cx| {
3070 SettingsStore::update_global(cx, |settings, cx| {
3071 settings.update_user_settings(cx, |settings| {
3072 settings.languages_mut().insert(
3073 "Rust".into(),
3074 LanguageSettingsContent {
3075 enable_language_server: Some(false),
3076 ..Default::default()
3077 },
3078 );
3079 });
3080 })
3081 });
3082 fake_rust_server_1
3083 .receive_notification::<lsp::notification::Exit>()
3084 .await;
3085
3086 // Enable Rust and disable JavaScript language servers, ensuring that the
3087 // former gets started again and that the latter stops.
3088 cx.update(|cx| {
3089 SettingsStore::update_global(cx, |settings, cx| {
3090 settings.update_user_settings(cx, |settings| {
3091 settings.languages_mut().insert(
3092 "Rust".into(),
3093 LanguageSettingsContent {
3094 enable_language_server: Some(true),
3095 ..Default::default()
3096 },
3097 );
3098 settings.languages_mut().insert(
3099 "JavaScript".into(),
3100 LanguageSettingsContent {
3101 enable_language_server: Some(false),
3102 ..Default::default()
3103 },
3104 );
3105 });
3106 })
3107 });
3108 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
3109 assert_eq!(
3110 fake_rust_server_2
3111 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3112 .await
3113 .text_document
3114 .uri
3115 .as_str(),
3116 uri!("file:///dir/a.rs")
3117 );
3118 fake_js_server
3119 .receive_notification::<lsp::notification::Exit>()
3120 .await;
3121}
3122
3123#[gpui::test(iterations = 3)]
3124async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3125 init_test(cx);
3126
3127 let text = "
3128 fn a() { A }
3129 fn b() { BB }
3130 fn c() { CCC }
3131 "
3132 .unindent();
3133
3134 let fs = FakeFs::new(cx.executor());
3135 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3136
3137 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3138 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3139
3140 language_registry.add(rust_lang());
3141 let mut fake_servers = language_registry.register_fake_lsp(
3142 "Rust",
3143 FakeLspAdapter {
3144 disk_based_diagnostics_sources: vec!["disk".into()],
3145 ..Default::default()
3146 },
3147 );
3148
3149 let buffer = project
3150 .update(cx, |project, cx| {
3151 project.open_local_buffer(path!("/dir/a.rs"), cx)
3152 })
3153 .await
3154 .unwrap();
3155
3156 let _handle = project.update(cx, |project, cx| {
3157 project.register_buffer_with_language_servers(&buffer, cx)
3158 });
3159
3160 let mut fake_server = fake_servers.next().await.unwrap();
3161 let open_notification = fake_server
3162 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3163 .await;
3164
3165 // Edit the buffer, moving the content down
3166 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3167 let change_notification_1 = fake_server
3168 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3169 .await;
3170 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3171
3172 // Report some diagnostics for the initial version of the buffer
3173 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3174 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3175 version: Some(open_notification.text_document.version),
3176 diagnostics: vec![
3177 lsp::Diagnostic {
3178 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3179 severity: Some(DiagnosticSeverity::ERROR),
3180 message: "undefined variable 'A'".to_string(),
3181 source: Some("disk".to_string()),
3182 ..Default::default()
3183 },
3184 lsp::Diagnostic {
3185 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3186 severity: Some(DiagnosticSeverity::ERROR),
3187 message: "undefined variable 'BB'".to_string(),
3188 source: Some("disk".to_string()),
3189 ..Default::default()
3190 },
3191 lsp::Diagnostic {
3192 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3193 severity: Some(DiagnosticSeverity::ERROR),
3194 source: Some("disk".to_string()),
3195 message: "undefined variable 'CCC'".to_string(),
3196 ..Default::default()
3197 },
3198 ],
3199 });
3200
3201 // The diagnostics have moved down since they were created.
3202 cx.executor().run_until_parked();
3203 buffer.update(cx, |buffer, _| {
3204 assert_eq!(
3205 buffer
3206 .snapshot()
3207 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3208 .collect::<Vec<_>>(),
3209 &[
3210 DiagnosticEntry {
3211 range: Point::new(3, 9)..Point::new(3, 11),
3212 diagnostic: Diagnostic {
3213 source: Some("disk".into()),
3214 severity: DiagnosticSeverity::ERROR,
3215 message: "undefined variable 'BB'".to_string(),
3216 is_disk_based: true,
3217 group_id: 1,
3218 is_primary: true,
3219 source_kind: DiagnosticSourceKind::Pushed,
3220 ..Diagnostic::default()
3221 },
3222 },
3223 DiagnosticEntry {
3224 range: Point::new(4, 9)..Point::new(4, 12),
3225 diagnostic: Diagnostic {
3226 source: Some("disk".into()),
3227 severity: DiagnosticSeverity::ERROR,
3228 message: "undefined variable 'CCC'".to_string(),
3229 is_disk_based: true,
3230 group_id: 2,
3231 is_primary: true,
3232 source_kind: DiagnosticSourceKind::Pushed,
3233 ..Diagnostic::default()
3234 }
3235 }
3236 ]
3237 );
3238 assert_eq!(
3239 chunks_with_diagnostics(buffer, 0..buffer.len()),
3240 [
3241 ("\n\nfn a() { ".to_string(), None),
3242 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3243 (" }\nfn b() { ".to_string(), None),
3244 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3245 (" }\nfn c() { ".to_string(), None),
3246 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3247 (" }\n".to_string(), None),
3248 ]
3249 );
3250 assert_eq!(
3251 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3252 [
3253 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3254 (" }\nfn c() { ".to_string(), None),
3255 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3256 ]
3257 );
3258 });
3259
3260 // Ensure overlapping diagnostics are highlighted correctly.
3261 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3262 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3263 version: Some(open_notification.text_document.version),
3264 diagnostics: vec![
3265 lsp::Diagnostic {
3266 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3267 severity: Some(DiagnosticSeverity::ERROR),
3268 message: "undefined variable 'A'".to_string(),
3269 source: Some("disk".to_string()),
3270 ..Default::default()
3271 },
3272 lsp::Diagnostic {
3273 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3274 severity: Some(DiagnosticSeverity::WARNING),
3275 message: "unreachable statement".to_string(),
3276 source: Some("disk".to_string()),
3277 ..Default::default()
3278 },
3279 ],
3280 });
3281
3282 cx.executor().run_until_parked();
3283 buffer.update(cx, |buffer, _| {
3284 assert_eq!(
3285 buffer
3286 .snapshot()
3287 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3288 .collect::<Vec<_>>(),
3289 &[
3290 DiagnosticEntry {
3291 range: Point::new(2, 9)..Point::new(2, 12),
3292 diagnostic: Diagnostic {
3293 source: Some("disk".into()),
3294 severity: DiagnosticSeverity::WARNING,
3295 message: "unreachable statement".to_string(),
3296 is_disk_based: true,
3297 group_id: 4,
3298 is_primary: true,
3299 source_kind: DiagnosticSourceKind::Pushed,
3300 ..Diagnostic::default()
3301 }
3302 },
3303 DiagnosticEntry {
3304 range: Point::new(2, 9)..Point::new(2, 10),
3305 diagnostic: Diagnostic {
3306 source: Some("disk".into()),
3307 severity: DiagnosticSeverity::ERROR,
3308 message: "undefined variable 'A'".to_string(),
3309 is_disk_based: true,
3310 group_id: 3,
3311 is_primary: true,
3312 source_kind: DiagnosticSourceKind::Pushed,
3313 ..Diagnostic::default()
3314 },
3315 }
3316 ]
3317 );
3318 assert_eq!(
3319 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3320 [
3321 ("fn a() { ".to_string(), None),
3322 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3323 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3324 ("\n".to_string(), None),
3325 ]
3326 );
3327 assert_eq!(
3328 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3329 [
3330 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3331 ("\n".to_string(), None),
3332 ]
3333 );
3334 });
3335
3336 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3337 // changes since the last save.
3338 buffer.update(cx, |buffer, cx| {
3339 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3340 buffer.edit(
3341 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3342 None,
3343 cx,
3344 );
3345 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3346 });
3347 let change_notification_2 = fake_server
3348 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3349 .await;
3350 assert!(
3351 change_notification_2.text_document.version > change_notification_1.text_document.version
3352 );
3353
3354 // Handle out-of-order diagnostics
3355 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3356 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3357 version: Some(change_notification_2.text_document.version),
3358 diagnostics: vec![
3359 lsp::Diagnostic {
3360 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3361 severity: Some(DiagnosticSeverity::ERROR),
3362 message: "undefined variable 'BB'".to_string(),
3363 source: Some("disk".to_string()),
3364 ..Default::default()
3365 },
3366 lsp::Diagnostic {
3367 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3368 severity: Some(DiagnosticSeverity::WARNING),
3369 message: "undefined variable 'A'".to_string(),
3370 source: Some("disk".to_string()),
3371 ..Default::default()
3372 },
3373 ],
3374 });
3375
3376 cx.executor().run_until_parked();
3377 buffer.update(cx, |buffer, _| {
3378 assert_eq!(
3379 buffer
3380 .snapshot()
3381 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3382 .collect::<Vec<_>>(),
3383 &[
3384 DiagnosticEntry {
3385 range: Point::new(2, 21)..Point::new(2, 22),
3386 diagnostic: Diagnostic {
3387 source: Some("disk".into()),
3388 severity: DiagnosticSeverity::WARNING,
3389 message: "undefined variable 'A'".to_string(),
3390 is_disk_based: true,
3391 group_id: 6,
3392 is_primary: true,
3393 source_kind: DiagnosticSourceKind::Pushed,
3394 ..Diagnostic::default()
3395 }
3396 },
3397 DiagnosticEntry {
3398 range: Point::new(3, 9)..Point::new(3, 14),
3399 diagnostic: Diagnostic {
3400 source: Some("disk".into()),
3401 severity: DiagnosticSeverity::ERROR,
3402 message: "undefined variable 'BB'".to_string(),
3403 is_disk_based: true,
3404 group_id: 5,
3405 is_primary: true,
3406 source_kind: DiagnosticSourceKind::Pushed,
3407 ..Diagnostic::default()
3408 },
3409 }
3410 ]
3411 );
3412 });
3413}
3414
3415#[gpui::test]
3416async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3417 init_test(cx);
3418
3419 let text = concat!(
3420 "let one = ;\n", //
3421 "let two = \n",
3422 "let three = 3;\n",
3423 );
3424
3425 let fs = FakeFs::new(cx.executor());
3426 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3427
3428 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3429 let buffer = project
3430 .update(cx, |project, cx| {
3431 project.open_local_buffer(path!("/dir/a.rs"), cx)
3432 })
3433 .await
3434 .unwrap();
3435
3436 project.update(cx, |project, cx| {
3437 project.lsp_store().update(cx, |lsp_store, cx| {
3438 lsp_store
3439 .update_diagnostic_entries(
3440 LanguageServerId(0),
3441 PathBuf::from(path!("/dir/a.rs")),
3442 None,
3443 None,
3444 vec![
3445 DiagnosticEntry {
3446 range: Unclipped(PointUtf16::new(0, 10))
3447 ..Unclipped(PointUtf16::new(0, 10)),
3448 diagnostic: Diagnostic {
3449 severity: DiagnosticSeverity::ERROR,
3450 message: "syntax error 1".to_string(),
3451 source_kind: DiagnosticSourceKind::Pushed,
3452 ..Diagnostic::default()
3453 },
3454 },
3455 DiagnosticEntry {
3456 range: Unclipped(PointUtf16::new(1, 10))
3457 ..Unclipped(PointUtf16::new(1, 10)),
3458 diagnostic: Diagnostic {
3459 severity: DiagnosticSeverity::ERROR,
3460 message: "syntax error 2".to_string(),
3461 source_kind: DiagnosticSourceKind::Pushed,
3462 ..Diagnostic::default()
3463 },
3464 },
3465 ],
3466 cx,
3467 )
3468 .unwrap();
3469 })
3470 });
3471
3472 // An empty range is extended forward to include the following character.
3473 // At the end of a line, an empty range is extended backward to include
3474 // the preceding character.
3475 buffer.update(cx, |buffer, _| {
3476 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3477 assert_eq!(
3478 chunks
3479 .iter()
3480 .map(|(s, d)| (s.as_str(), *d))
3481 .collect::<Vec<_>>(),
3482 &[
3483 ("let one = ", None),
3484 (";", Some(DiagnosticSeverity::ERROR)),
3485 ("\nlet two =", None),
3486 (" ", Some(DiagnosticSeverity::ERROR)),
3487 ("\nlet three = 3;\n", None)
3488 ]
3489 );
3490 });
3491}
3492
3493#[gpui::test]
3494async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3495 init_test(cx);
3496
3497 let fs = FakeFs::new(cx.executor());
3498 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3499 .await;
3500
3501 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3502 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3503
3504 lsp_store.update(cx, |lsp_store, cx| {
3505 lsp_store
3506 .update_diagnostic_entries(
3507 LanguageServerId(0),
3508 Path::new(path!("/dir/a.rs")).to_owned(),
3509 None,
3510 None,
3511 vec![DiagnosticEntry {
3512 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3513 diagnostic: Diagnostic {
3514 severity: DiagnosticSeverity::ERROR,
3515 is_primary: true,
3516 message: "syntax error a1".to_string(),
3517 source_kind: DiagnosticSourceKind::Pushed,
3518 ..Diagnostic::default()
3519 },
3520 }],
3521 cx,
3522 )
3523 .unwrap();
3524 lsp_store
3525 .update_diagnostic_entries(
3526 LanguageServerId(1),
3527 Path::new(path!("/dir/a.rs")).to_owned(),
3528 None,
3529 None,
3530 vec![DiagnosticEntry {
3531 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3532 diagnostic: Diagnostic {
3533 severity: DiagnosticSeverity::ERROR,
3534 is_primary: true,
3535 message: "syntax error b1".to_string(),
3536 source_kind: DiagnosticSourceKind::Pushed,
3537 ..Diagnostic::default()
3538 },
3539 }],
3540 cx,
3541 )
3542 .unwrap();
3543
3544 assert_eq!(
3545 lsp_store.diagnostic_summary(false, cx),
3546 DiagnosticSummary {
3547 error_count: 2,
3548 warning_count: 0,
3549 }
3550 );
3551 });
3552}
3553
// Checks `edits_from_lsp` when the server's edits were computed against a
// *stale* document version: the buffer is edited after the server last saw
// it, so every LSP range must be translated through the intervening user
// edits before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server observed on open; the LSP
    // edits below will claim to target this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret server edits whose coordinates are in the *original*
    // (pre-user-edit) coordinate space, as indicated by the stale version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve both the server's changes
    // and the user's intervening edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3708
// Servers sometimes express a tiny change as a huge diff that rewrites most
// of the file (rust-analyzer's merge-imports action does this). Verifies
// that `edits_from_lsp` minimizes such input down to the actually changed
// spans.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The wholesale rewrite collapses to just two minimal edits: the
        // import change plus a deletion of the now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3819
3820#[gpui::test]
3821async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3822 cx: &mut gpui::TestAppContext,
3823) {
3824 init_test(cx);
3825
3826 let text = "Path()";
3827
3828 let fs = FakeFs::new(cx.executor());
3829 fs.insert_tree(
3830 path!("/dir"),
3831 json!({
3832 "a.rs": text
3833 }),
3834 )
3835 .await;
3836
3837 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3838 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3839 let buffer = project
3840 .update(cx, |project, cx| {
3841 project.open_local_buffer(path!("/dir/a.rs"), cx)
3842 })
3843 .await
3844 .unwrap();
3845
3846 // Simulate the language server sending us a pair of edits at the same location,
3847 // with an insertion following a replacement (which violates the LSP spec).
3848 let edits = lsp_store
3849 .update(cx, |lsp_store, cx| {
3850 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3851 &buffer,
3852 [
3853 lsp::TextEdit {
3854 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3855 new_text: "Path".into(),
3856 },
3857 lsp::TextEdit {
3858 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3859 new_text: "from path import Path\n\n\n".into(),
3860 },
3861 ],
3862 LanguageServerId(0),
3863 None,
3864 cx,
3865 )
3866 })
3867 .await
3868 .unwrap();
3869
3870 buffer.update(cx, |buffer, cx| {
3871 buffer.edit(edits, None, cx);
3872 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3873 });
3874}
3875
// `edits_from_lsp` must tolerate malformed server output: edits delivered
// out of order, with inverted ranges, or pointing past the end of the file.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position lies far beyond the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair
        // of edits as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3982
3983fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3984 buffer: &Buffer,
3985 range: Range<T>,
3986) -> Vec<(String, Option<DiagnosticSeverity>)> {
3987 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3988 for chunk in buffer.snapshot().chunks(range, true) {
3989 if chunks
3990 .last()
3991 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3992 {
3993 chunks.last_mut().unwrap().0.push_str(chunk.text);
3994 } else {
3995 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3996 }
3997 }
3998 chunks
3999}
4000
// End-to-end go-to-definition: the server's response points into a file that
// is not part of the project, so an extra *invisible* worktree is created
// for it (without starting another language server) and is released again
// once the last reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as a (visible) worktree root.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server reports the definition in `a.rs`, a file outside the
    // project's single worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` now shows up as an additional, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree again.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists every worktree in the project as (absolute path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4101
4102#[gpui::test]
4103async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4104 init_test(cx);
4105
4106 let fs = FakeFs::new(cx.executor());
4107 fs.insert_tree(
4108 path!("/dir"),
4109 json!({
4110 "a.ts": "",
4111 }),
4112 )
4113 .await;
4114
4115 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4116
4117 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4118 language_registry.add(typescript_lang());
4119 let mut fake_language_servers = language_registry.register_fake_lsp(
4120 "TypeScript",
4121 FakeLspAdapter {
4122 capabilities: lsp::ServerCapabilities {
4123 completion_provider: Some(lsp::CompletionOptions {
4124 trigger_characters: Some(vec![".".to_string()]),
4125 ..Default::default()
4126 }),
4127 ..Default::default()
4128 },
4129 ..Default::default()
4130 },
4131 );
4132
4133 let (buffer, _handle) = project
4134 .update(cx, |p, cx| {
4135 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4136 })
4137 .await
4138 .unwrap();
4139
4140 let fake_server = fake_language_servers.next().await.unwrap();
4141 cx.executor().run_until_parked();
4142
4143 // When text_edit exists, it takes precedence over insert_text and label
4144 let text = "let a = obj.fqn";
4145 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4146 let completions = project.update(cx, |project, cx| {
4147 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4148 });
4149
4150 fake_server
4151 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4152 Ok(Some(lsp::CompletionResponse::Array(vec![
4153 lsp::CompletionItem {
4154 label: "labelText".into(),
4155 insert_text: Some("insertText".into()),
4156 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4157 range: lsp::Range::new(
4158 lsp::Position::new(0, text.len() as u32 - 3),
4159 lsp::Position::new(0, text.len() as u32),
4160 ),
4161 new_text: "textEditText".into(),
4162 })),
4163 ..Default::default()
4164 },
4165 ])))
4166 })
4167 .next()
4168 .await;
4169
4170 let completions = completions
4171 .await
4172 .unwrap()
4173 .into_iter()
4174 .flat_map(|response| response.completions)
4175 .collect::<Vec<_>>();
4176 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4177
4178 assert_eq!(completions.len(), 1);
4179 assert_eq!(completions[0].new_text, "textEditText");
4180 assert_eq!(
4181 completions[0].replace_range.to_offset(&snapshot),
4182 text.len() - 3..text.len()
4183 );
4184}
4185
// Completion items may omit their own `text_edit` and rely on the list-level
// `item_defaults.edit_range` instead. Covers the `text_edit_text` fallback
// and the final fallback to the item label.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // The default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is combined with the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label (not insert_text) is
        // used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4323
// When neither the items nor the list defaults provide an edit range, the
// replace range must be inferred from the word around the cursor. Covers
// the `insert_text` fallback and the final fallback to the item label.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label; the inferred range covers the
    // partial word "fqn" before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The cursor sits just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With no other text source, the label itself becomes the new text,
    // replacing the partial word "cmp".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4430
// Regression test: carriage returns (`\r` and `\r\n`) in a completion's
// `insert_text` are normalized to plain `\n` by the time the completion
// reaches the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion
    // support, so `project.completions` issues an LSP request.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Request completions at the end of the text. The handler is installed
    // *after* the request is issued: `set_request_handler(...).next().await`
    // answers the already-pending request.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an item whose `insert_text` mixes a bare `\r` and a
    // `\r\n` sequence.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both styles of carriage return were replaced with plain `\n`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4499
// End-to-end test of a code action that carries no edits of its own:
// resolving it yields a command, executing the command causes the fake
// server to send a `workspace/applyEdit` request back to the client, and
// the edits applied that way must show up in the returned project
// transaction (and be undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // The fake server advertises code actions with lazy resolution
    // (`resolve_provider: true`) plus a single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    // (The handler is installed after the request is issued; `.next().await`
    // answers the pending request.)
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4643
4644#[gpui::test]
4645async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4646 init_test(cx);
4647 let fs = FakeFs::new(cx.background_executor.clone());
4648 let expected_contents = "content";
4649 fs.as_fake()
4650 .insert_tree(
4651 "/root",
4652 json!({
4653 "test.txt": expected_contents
4654 }),
4655 )
4656 .await;
4657
4658 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4659
4660 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4661 let worktree = project.worktrees(cx).next().unwrap();
4662 let entry_id = worktree
4663 .read(cx)
4664 .entry_for_path(rel_path("test.txt"))
4665 .unwrap()
4666 .id;
4667 (worktree, entry_id)
4668 });
4669 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4670 let _result = project
4671 .update(cx, |project, cx| {
4672 project.rename_entry(
4673 entry_id,
4674 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4675 cx,
4676 )
4677 })
4678 .await
4679 .unwrap();
4680 worktree.read_with(cx, |worktree, _| {
4681 assert!(
4682 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4683 "Old file should have been removed"
4684 );
4685 assert!(
4686 worktree
4687 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4688 .is_some(),
4689 "Whole directory hierarchy and the new file should have been created"
4690 );
4691 });
4692 assert_eq!(
4693 worktree
4694 .update(cx, |worktree, cx| {
4695 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4696 })
4697 .await
4698 .unwrap()
4699 .text,
4700 expected_contents,
4701 "Moved file's contents should be preserved"
4702 );
4703
4704 let entry_id = worktree.read_with(cx, |worktree, _| {
4705 worktree
4706 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4707 .unwrap()
4708 .id
4709 });
4710
4711 let _result = project
4712 .update(cx, |project, cx| {
4713 project.rename_entry(
4714 entry_id,
4715 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4716 cx,
4717 )
4718 })
4719 .await
4720 .unwrap();
4721 worktree.read_with(cx, |worktree, _| {
4722 assert!(
4723 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4724 "First file should not reappear"
4725 );
4726 assert!(
4727 worktree
4728 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4729 .is_none(),
4730 "Old file should have been removed"
4731 );
4732 assert!(
4733 worktree
4734 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4735 .is_some(),
4736 "No error should have occurred after moving into existing directory"
4737 );
4738 });
4739 assert_eq!(
4740 worktree
4741 .update(cx, |worktree, cx| {
4742 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4743 })
4744 .await
4745 .unwrap()
4746 .text,
4747 expected_contents,
4748 "Moved file's contents should be preserved"
4749 );
4750}
4751
4752#[gpui::test(iterations = 10)]
4753async fn test_save_file(cx: &mut gpui::TestAppContext) {
4754 init_test(cx);
4755
4756 let fs = FakeFs::new(cx.executor());
4757 fs.insert_tree(
4758 path!("/dir"),
4759 json!({
4760 "file1": "the old contents",
4761 }),
4762 )
4763 .await;
4764
4765 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4766 let buffer = project
4767 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4768 .await
4769 .unwrap();
4770 buffer.update(cx, |buffer, cx| {
4771 assert_eq!(buffer.text(), "the old contents");
4772 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4773 });
4774
4775 project
4776 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4777 .await
4778 .unwrap();
4779
4780 let new_text = fs
4781 .load(Path::new(path!("/dir/file1")))
4782 .await
4783 .unwrap()
4784 .replace("\r\n", "\n");
4785 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4786}
4787
// An untitled buffer has no language servers; saving it under a `.rs` path
// must start the Rust language server, open the document there, and make
// `has_language_servers_for` report true afterwards.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an in-memory (untitled) buffer. Registering it with language
    // servers is a no-op at this point, because it has no file and no
    // language yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as `file.rs`, giving it a path and (by extension)
    // the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now attached to the newly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4867
4868#[gpui::test(iterations = 30)]
4869async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4870 init_test(cx);
4871
4872 let fs = FakeFs::new(cx.executor());
4873 fs.insert_tree(
4874 path!("/dir"),
4875 json!({
4876 "file1": "the original contents",
4877 }),
4878 )
4879 .await;
4880
4881 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4882 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4883 let buffer = project
4884 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4885 .await
4886 .unwrap();
4887
4888 // Change the buffer's file on disk, and then wait for the file change
4889 // to be detected by the worktree, so that the buffer starts reloading.
4890 fs.save(
4891 path!("/dir/file1").as_ref(),
4892 &"the first contents".into(),
4893 Default::default(),
4894 )
4895 .await
4896 .unwrap();
4897 worktree.next_event(cx).await;
4898
4899 // Change the buffer's file again. Depending on the random seed, the
4900 // previous file change may still be in progress.
4901 fs.save(
4902 path!("/dir/file1").as_ref(),
4903 &"the second contents".into(),
4904 Default::default(),
4905 )
4906 .await
4907 .unwrap();
4908 worktree.next_event(cx).await;
4909
4910 cx.executor().run_until_parked();
4911 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4912 buffer.read_with(cx, |buffer, _| {
4913 assert_eq!(buffer.text(), on_disk_text);
4914 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4915 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4916 });
4917}
4918
4919#[gpui::test(iterations = 30)]
4920async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4921 init_test(cx);
4922
4923 let fs = FakeFs::new(cx.executor());
4924 fs.insert_tree(
4925 path!("/dir"),
4926 json!({
4927 "file1": "the original contents",
4928 }),
4929 )
4930 .await;
4931
4932 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4933 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4934 let buffer = project
4935 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4936 .await
4937 .unwrap();
4938
4939 // Change the buffer's file on disk, and then wait for the file change
4940 // to be detected by the worktree, so that the buffer starts reloading.
4941 fs.save(
4942 path!("/dir/file1").as_ref(),
4943 &"the first contents".into(),
4944 Default::default(),
4945 )
4946 .await
4947 .unwrap();
4948 worktree.next_event(cx).await;
4949
4950 cx.executor()
4951 .spawn(cx.executor().simulate_random_delay())
4952 .await;
4953
4954 // Perform a noop edit, causing the buffer's version to increase.
4955 buffer.update(cx, |buffer, cx| {
4956 buffer.edit([(0..0, " ")], None, cx);
4957 buffer.undo(cx);
4958 });
4959
4960 cx.executor().run_until_parked();
4961 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4962 buffer.read_with(cx, |buffer, _| {
4963 let buffer_text = buffer.text();
4964 if buffer_text == on_disk_text {
4965 assert!(
4966 !buffer.is_dirty() && !buffer.has_conflict(),
4967 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4968 );
4969 }
4970 // If the file change occurred while the buffer was processing the first
4971 // change, the buffer will be in a conflicting state.
4972 else {
4973 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4974 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4975 }
4976 });
4977}
4978
4979#[gpui::test]
4980async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4981 init_test(cx);
4982
4983 let fs = FakeFs::new(cx.executor());
4984 fs.insert_tree(
4985 path!("/dir"),
4986 json!({
4987 "file1": "the old contents",
4988 }),
4989 )
4990 .await;
4991
4992 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4993 let buffer = project
4994 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4995 .await
4996 .unwrap();
4997 buffer.update(cx, |buffer, cx| {
4998 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4999 });
5000
5001 project
5002 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5003 .await
5004 .unwrap();
5005
5006 let new_text = fs
5007 .load(Path::new(path!("/dir/file1")))
5008 .await
5009 .unwrap()
5010 .replace("\r\n", "\n");
5011 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5012}
5013
5014#[gpui::test]
5015async fn test_save_as(cx: &mut gpui::TestAppContext) {
5016 init_test(cx);
5017
5018 let fs = FakeFs::new(cx.executor());
5019 fs.insert_tree("/dir", json!({})).await;
5020
5021 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5022
5023 let languages = project.update(cx, |project, _| project.languages().clone());
5024 languages.add(rust_lang());
5025
5026 let buffer = project.update(cx, |project, cx| {
5027 project.create_local_buffer("", None, false, cx)
5028 });
5029 buffer.update(cx, |buffer, cx| {
5030 buffer.edit([(0..0, "abc")], None, cx);
5031 assert!(buffer.is_dirty());
5032 assert!(!buffer.has_conflict());
5033 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5034 });
5035 project
5036 .update(cx, |project, cx| {
5037 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5038 let path = ProjectPath {
5039 worktree_id,
5040 path: rel_path("file1.rs").into(),
5041 };
5042 project.save_buffer_as(buffer.clone(), path, cx)
5043 })
5044 .await
5045 .unwrap();
5046 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5047
5048 cx.executor().run_until_parked();
5049 buffer.update(cx, |buffer, cx| {
5050 assert_eq!(
5051 buffer.file().unwrap().full_path(cx),
5052 Path::new("dir/file1.rs")
5053 );
5054 assert!(!buffer.is_dirty());
5055 assert!(!buffer.has_conflict());
5056 assert_eq!(buffer.language().unwrap().name(), "Rust");
5057 });
5058
5059 let opened_buffer = project
5060 .update(cx, |project, cx| {
5061 project.open_local_buffer("/dir/file1.rs", cx)
5062 })
5063 .await
5064 .unwrap();
5065 assert_eq!(opened_buffer, buffer);
5066}
5067
5068#[gpui::test]
5069async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5070 init_test(cx);
5071
5072 let fs = FakeFs::new(cx.executor());
5073 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5074
5075 fs.insert_tree(
5076 path!("/dir"),
5077 json!({
5078 "data_a.txt": "data about a"
5079 }),
5080 )
5081 .await;
5082
5083 let buffer = project
5084 .update(cx, |project, cx| {
5085 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5086 })
5087 .await
5088 .unwrap();
5089
5090 buffer.update(cx, |buffer, cx| {
5091 buffer.edit([(11..12, "b")], None, cx);
5092 });
5093
5094 // Save buffer's contents as a new file and confirm that the buffer's now
5095 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5096 // file associated with the buffer has now been updated to `data_b.txt`
5097 project
5098 .update(cx, |project, cx| {
5099 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5100 let new_path = ProjectPath {
5101 worktree_id,
5102 path: rel_path("data_b.txt").into(),
5103 };
5104
5105 project.save_buffer_as(buffer.clone(), new_path, cx)
5106 })
5107 .await
5108 .unwrap();
5109
5110 buffer.update(cx, |buffer, cx| {
5111 assert_eq!(
5112 buffer.file().unwrap().full_path(cx),
5113 Path::new("dir/data_b.txt")
5114 )
5115 });
5116
5117 // Open the original `data_a.txt` file, confirming that its contents are
5118 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5119 let original_buffer = project
5120 .update(cx, |project, cx| {
5121 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5122 })
5123 .await
5124 .unwrap();
5125
5126 original_buffer.update(cx, |buffer, cx| {
5127 assert_eq!(buffer.text(), "data about a");
5128 assert_eq!(
5129 buffer.file().unwrap().full_path(cx),
5130 Path::new("dir/data_a.txt")
5131 )
5132 });
5133}
5134
// Uses a real filesystem. Renames and deletions on disk must (1) preserve
// worktree entry ids and open buffers' file associations, and (2) replicate
// to a remote copy of the worktree via observed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before any fs mutation, to check id stability later.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, to replay them on the
    // remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survived the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files across renames; the deleted file's
    // buffer keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5302
5303#[cfg(target_os = "linux")]
5304#[gpui::test(retries = 5)]
5305async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5306 init_test(cx);
5307 cx.executor().allow_parking();
5308
5309 let dir = TempTree::new(json!({}));
5310 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5311 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5312
5313 tree.flush_fs_events(cx).await;
5314
5315 let repro_dir = dir.path().join("repro");
5316 std::fs::create_dir(&repro_dir).unwrap();
5317 tree.flush_fs_events(cx).await;
5318
5319 cx.update(|cx| {
5320 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5321 });
5322
5323 std::fs::remove_dir_all(&repro_dir).unwrap();
5324 tree.flush_fs_events(cx).await;
5325
5326 cx.update(|cx| {
5327 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5328 });
5329
5330 std::fs::create_dir(&repro_dir).unwrap();
5331 tree.flush_fs_events(cx).await;
5332
5333 cx.update(|cx| {
5334 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5335 });
5336
5337 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5338 tree.flush_fs_events(cx).await;
5339
5340 cx.update(|cx| {
5341 assert!(
5342 tree.read(cx)
5343 .entry_for_path(rel_path("repro/repro-marker"))
5344 .is_some()
5345 );
5346 });
5347}
5348
// Renaming a directory must preserve the entry ids of the directory and
// the files inside it, and leave buffers opened from those files clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Record the pre-rename ids of the directory and the file within it.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the open buffer is
    // still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5402
5403#[gpui::test]
5404async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5405 init_test(cx);
5406
5407 let fs = FakeFs::new(cx.executor());
5408 fs.insert_tree(
5409 "/dir",
5410 json!({
5411 "a.txt": "a-contents",
5412 "b.txt": "b-contents",
5413 }),
5414 )
5415 .await;
5416
5417 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5418
5419 // Spawn multiple tasks to open paths, repeating some paths.
5420 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5421 (
5422 p.open_local_buffer("/dir/a.txt", cx),
5423 p.open_local_buffer("/dir/b.txt", cx),
5424 p.open_local_buffer("/dir/a.txt", cx),
5425 )
5426 });
5427
5428 let buffer_a_1 = buffer_a_1.await.unwrap();
5429 let buffer_a_2 = buffer_a_2.await.unwrap();
5430 let buffer_b = buffer_b.await.unwrap();
5431 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5432 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5433
5434 // There is only one buffer per path.
5435 let buffer_a_id = buffer_a_1.entity_id();
5436 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5437
5438 // Open the same path again while it is still open.
5439 drop(buffer_a_1);
5440 let buffer_a_3 = project
5441 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5442 .await
5443 .unwrap();
5444
5445 // There's still only one buffer per path.
5446 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5447}
5448
5449#[gpui::test]
5450async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5451 init_test(cx);
5452
5453 let fs = FakeFs::new(cx.executor());
5454 fs.insert_tree(
5455 path!("/dir"),
5456 json!({
5457 "file1": "abc",
5458 "file2": "def",
5459 "file3": "ghi",
5460 }),
5461 )
5462 .await;
5463
5464 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5465
5466 let buffer1 = project
5467 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5468 .await
5469 .unwrap();
5470 let events = Arc::new(Mutex::new(Vec::new()));
5471
5472 // initially, the buffer isn't dirty.
5473 buffer1.update(cx, |buffer, cx| {
5474 cx.subscribe(&buffer1, {
5475 let events = events.clone();
5476 move |_, _, event, _| match event {
5477 BufferEvent::Operation { .. } => {}
5478 _ => events.lock().push(event.clone()),
5479 }
5480 })
5481 .detach();
5482
5483 assert!(!buffer.is_dirty());
5484 assert!(events.lock().is_empty());
5485
5486 buffer.edit([(1..2, "")], None, cx);
5487 });
5488
5489 // after the first edit, the buffer is dirty, and emits a dirtied event.
5490 buffer1.update(cx, |buffer, cx| {
5491 assert!(buffer.text() == "ac");
5492 assert!(buffer.is_dirty());
5493 assert_eq!(
5494 *events.lock(),
5495 &[
5496 language::BufferEvent::Edited { is_local: true },
5497 language::BufferEvent::DirtyChanged
5498 ]
5499 );
5500 events.lock().clear();
5501 buffer.did_save(
5502 buffer.version(),
5503 buffer.file().unwrap().disk_state().mtime(),
5504 cx,
5505 );
5506 });
5507
5508 // after saving, the buffer is not dirty, and emits a saved event.
5509 buffer1.update(cx, |buffer, cx| {
5510 assert!(!buffer.is_dirty());
5511 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5512 events.lock().clear();
5513
5514 buffer.edit([(1..1, "B")], None, cx);
5515 buffer.edit([(2..2, "D")], None, cx);
5516 });
5517
5518 // after editing again, the buffer is dirty, and emits another dirty event.
5519 buffer1.update(cx, |buffer, cx| {
5520 assert!(buffer.text() == "aBDc");
5521 assert!(buffer.is_dirty());
5522 assert_eq!(
5523 *events.lock(),
5524 &[
5525 language::BufferEvent::Edited { is_local: true },
5526 language::BufferEvent::DirtyChanged,
5527 language::BufferEvent::Edited { is_local: true },
5528 ],
5529 );
5530 events.lock().clear();
5531
5532 // After restoring the buffer to its previously-saved state,
5533 // the buffer is not considered dirty anymore.
5534 buffer.edit([(1..3, "")], None, cx);
5535 assert!(buffer.text() == "ac");
5536 assert!(!buffer.is_dirty());
5537 });
5538
5539 assert_eq!(
5540 *events.lock(),
5541 &[
5542 language::BufferEvent::Edited { is_local: true },
5543 language::BufferEvent::DirtyChanged
5544 ]
5545 );
5546
5547 // When a file is deleted, it is not considered dirty.
5548 let events = Arc::new(Mutex::new(Vec::new()));
5549 let buffer2 = project
5550 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5551 .await
5552 .unwrap();
5553 buffer2.update(cx, |_, cx| {
5554 cx.subscribe(&buffer2, {
5555 let events = events.clone();
5556 move |_, _, event, _| match event {
5557 BufferEvent::Operation { .. } => {}
5558 _ => events.lock().push(event.clone()),
5559 }
5560 })
5561 .detach();
5562 });
5563
5564 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5565 .await
5566 .unwrap();
5567 cx.executor().run_until_parked();
5568 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5569 assert_eq!(
5570 mem::take(&mut *events.lock()),
5571 &[language::BufferEvent::FileHandleChanged]
5572 );
5573
5574 // Buffer becomes dirty when edited.
5575 buffer2.update(cx, |buffer, cx| {
5576 buffer.edit([(2..3, "")], None, cx);
5577 assert_eq!(buffer.is_dirty(), true);
5578 });
5579 assert_eq!(
5580 mem::take(&mut *events.lock()),
5581 &[
5582 language::BufferEvent::Edited { is_local: true },
5583 language::BufferEvent::DirtyChanged
5584 ]
5585 );
5586
5587 // Buffer becomes clean again when all of its content is removed, because
5588 // the file was deleted.
5589 buffer2.update(cx, |buffer, cx| {
5590 buffer.edit([(0..2, "")], None, cx);
5591 assert_eq!(buffer.is_empty(), true);
5592 assert_eq!(buffer.is_dirty(), false);
5593 });
5594 assert_eq!(
5595 *events.lock(),
5596 &[
5597 language::BufferEvent::Edited { is_local: true },
5598 language::BufferEvent::DirtyChanged
5599 ]
5600 );
5601
5602 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5603 let events = Arc::new(Mutex::new(Vec::new()));
5604 let buffer3 = project
5605 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5606 .await
5607 .unwrap();
5608 buffer3.update(cx, |_, cx| {
5609 cx.subscribe(&buffer3, {
5610 let events = events.clone();
5611 move |_, _, event, _| match event {
5612 BufferEvent::Operation { .. } => {}
5613 _ => events.lock().push(event.clone()),
5614 }
5615 })
5616 .detach();
5617 });
5618
5619 buffer3.update(cx, |buffer, cx| {
5620 buffer.edit([(0..0, "x")], None, cx);
5621 });
5622 events.lock().clear();
5623 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5624 .await
5625 .unwrap();
5626 cx.executor().run_until_parked();
5627 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5628 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5629}
5630
5631#[gpui::test]
5632async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
5633 init_test(cx);
5634
5635 let fs = FakeFs::new(cx.executor());
5636 fs.insert_tree(
5637 path!("/dir"),
5638 json!({
5639 "file.txt": "version 1",
5640 }),
5641 )
5642 .await;
5643
5644 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5645 let buffer = project
5646 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
5647 .await
5648 .unwrap();
5649
5650 buffer.read_with(cx, |buffer, _| {
5651 assert_eq!(buffer.text(), "version 1");
5652 assert!(!buffer.is_dirty());
5653 });
5654
5655 // User makes an edit, making the buffer dirty.
5656 buffer.update(cx, |buffer, cx| {
5657 buffer.edit([(0..0, "user edit: ")], None, cx);
5658 });
5659
5660 buffer.read_with(cx, |buffer, _| {
5661 assert!(buffer.is_dirty());
5662 assert_eq!(buffer.text(), "user edit: version 1");
5663 });
5664
5665 // External tool writes new content while buffer is dirty.
5666 // file_updated() updates the File but suppresses ReloadNeeded.
5667 fs.save(
5668 path!("/dir/file.txt").as_ref(),
5669 &"version 2 from external tool".into(),
5670 Default::default(),
5671 )
5672 .await
5673 .unwrap();
5674 cx.executor().run_until_parked();
5675
5676 buffer.read_with(cx, |buffer, _| {
5677 assert!(buffer.has_conflict());
5678 assert_eq!(buffer.text(), "user edit: version 1");
5679 });
5680
5681 // User undoes their edit. Buffer becomes clean, but disk has different
5682 // content. did_edit() detects the dirty->clean transition and checks if
5683 // disk changed while dirty. Since mtime differs from saved_mtime, it
5684 // emits ReloadNeeded.
5685 buffer.update(cx, |buffer, cx| {
5686 buffer.undo(cx);
5687 });
5688 cx.executor().run_until_parked();
5689
5690 buffer.read_with(cx, |buffer, _| {
5691 assert_eq!(
5692 buffer.text(),
5693 "version 2 from external tool",
5694 "buffer should reload from disk after undo makes it clean"
5695 );
5696 assert!(!buffer.is_dirty());
5697 });
5698}
5699
5700#[gpui::test]
5701async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
5702 init_test(cx);
5703
5704 let (initial_contents, initial_offsets) =
5705 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
5706 let fs = FakeFs::new(cx.executor());
5707 fs.insert_tree(
5708 path!("/dir"),
5709 json!({
5710 "the-file": initial_contents,
5711 }),
5712 )
5713 .await;
5714 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5715 let buffer = project
5716 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
5717 .await
5718 .unwrap();
5719
5720 let anchors = initial_offsets
5721 .iter()
5722 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
5723 .collect::<Vec<_>>();
5724
5725 // Change the file on disk, adding two new lines of text, and removing
5726 // one line.
5727 buffer.update(cx, |buffer, _| {
5728 assert!(!buffer.is_dirty());
5729 assert!(!buffer.has_conflict());
5730 });
5731
5732 let (new_contents, new_offsets) =
5733 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
5734 fs.save(
5735 path!("/dir/the-file").as_ref(),
5736 &new_contents.as_str().into(),
5737 LineEnding::Unix,
5738 )
5739 .await
5740 .unwrap();
5741
5742 // Because the buffer was not modified, it is reloaded from disk. Its
5743 // contents are edited according to the diff between the old and new
5744 // file contents.
5745 cx.executor().run_until_parked();
5746 buffer.update(cx, |buffer, _| {
5747 assert_eq!(buffer.text(), new_contents);
5748 assert!(!buffer.is_dirty());
5749 assert!(!buffer.has_conflict());
5750
5751 let anchor_offsets = anchors
5752 .iter()
5753 .map(|anchor| anchor.to_offset(&*buffer))
5754 .collect::<Vec<_>>();
5755 assert_eq!(anchor_offsets, new_offsets);
5756 });
5757
5758 // Modify the buffer
5759 buffer.update(cx, |buffer, cx| {
5760 buffer.edit([(0..0, " ")], None, cx);
5761 assert!(buffer.is_dirty());
5762 assert!(!buffer.has_conflict());
5763 });
5764
5765 // Change the file on disk again, adding blank lines to the beginning.
5766 fs.save(
5767 path!("/dir/the-file").as_ref(),
5768 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
5769 LineEnding::Unix,
5770 )
5771 .await
5772 .unwrap();
5773
5774 // Because the buffer is modified, it doesn't reload from disk, but is
5775 // marked as having a conflict.
5776 cx.executor().run_until_parked();
5777 buffer.update(cx, |buffer, _| {
5778 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
5779 assert!(buffer.has_conflict());
5780 });
5781}
5782
5783#[gpui::test]
5784async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5785 init_test(cx);
5786
5787 let fs = FakeFs::new(cx.executor());
5788 fs.insert_tree(
5789 path!("/dir"),
5790 json!({
5791 "file1": "a\nb\nc\n",
5792 "file2": "one\r\ntwo\r\nthree\r\n",
5793 }),
5794 )
5795 .await;
5796
5797 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5798 let buffer1 = project
5799 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5800 .await
5801 .unwrap();
5802 let buffer2 = project
5803 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5804 .await
5805 .unwrap();
5806
5807 buffer1.update(cx, |buffer, _| {
5808 assert_eq!(buffer.text(), "a\nb\nc\n");
5809 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5810 });
5811 buffer2.update(cx, |buffer, _| {
5812 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5813 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5814 });
5815
5816 // Change a file's line endings on disk from unix to windows. The buffer's
5817 // state updates correctly.
5818 fs.save(
5819 path!("/dir/file1").as_ref(),
5820 &"aaa\nb\nc\n".into(),
5821 LineEnding::Windows,
5822 )
5823 .await
5824 .unwrap();
5825 cx.executor().run_until_parked();
5826 buffer1.update(cx, |buffer, _| {
5827 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5828 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5829 });
5830
5831 // Save a file with windows line endings. The file is written correctly.
5832 buffer2.update(cx, |buffer, cx| {
5833 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5834 });
5835 project
5836 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5837 .await
5838 .unwrap();
5839 assert_eq!(
5840 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5841 "one\r\ntwo\r\nthree\r\nfour\r\n",
5842 );
5843}
5844
// Verifies that pushed LSP diagnostics are grouped: a primary diagnostic and
// the hint diagnostics listed in its `related_information` share a group_id,
// and groups can be queried both by buffer range and by group id.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish two overlapping groups, in the style of rust-analyzer:
    // - "error 1" (warning) with one related hint at the same range;
    // - "error 2" (error) with two related hints on the preceding line.
    // Each hint is ALSO published as its own diagnostic whose
    // related_information points back at the primary ("original diagnostic"),
    // which is what lets the diagnostics be grouped.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the pushed diagnostics into the LSP store as language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer, ordered by position. "error 2"
    // and its hints get group 0; "error 1" and its hint get group 1. Exactly
    // one entry per group is the primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, ordered by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6104
// Verifies that renaming a project entry sends `workspace/willRenameFiles` to
// servers whose registered file-operation filters match, uses the workspace
// edit the server returns, and afterwards sends `workspace/didRenameFiles`.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: Rust source files,
    // plus any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake Rust server advertising both willRename and didRename support for
    // the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening one.rs starts the language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs. The returned task resolves
    // only after the server has answered `willRenameFiles` below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the fake server will return from `willRenameFiles`: a
    // versioned edit in two/two.rs.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually served, so we can confirm at the
    // end that the request was made exactly once.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new file URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6241
// End-to-end test of symbol renaming via the LSP `textDocument/prepareRename`
// and `textDocument/rename` requests: the workspace edit the server returns
// is applied across multiple project buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Fake Rust server advertising rename support with prepareRename.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // prepare_rename at offset 7 (inside "ONE"); the fake server reports the
    // renameable range as columns 6..9 of line 0.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename to "THREE"; the fake server returns edits touching both
    // one.rs (the definition) and two.rs (two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The rename transaction contains both edited buffers, with the edits
    // already applied to their text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6382
6383#[gpui::test]
6384async fn test_search(cx: &mut gpui::TestAppContext) {
6385 init_test(cx);
6386
6387 let fs = FakeFs::new(cx.executor());
6388 fs.insert_tree(
6389 path!("/dir"),
6390 json!({
6391 "one.rs": "const ONE: usize = 1;",
6392 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6393 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6394 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6395 }),
6396 )
6397 .await;
6398 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6399 assert_eq!(
6400 search(
6401 &project,
6402 SearchQuery::text(
6403 "TWO",
6404 false,
6405 true,
6406 false,
6407 Default::default(),
6408 Default::default(),
6409 false,
6410 None
6411 )
6412 .unwrap(),
6413 cx
6414 )
6415 .await
6416 .unwrap(),
6417 HashMap::from_iter([
6418 (path!("dir/two.rs").to_string(), vec![6..9]),
6419 (path!("dir/three.rs").to_string(), vec![37..40])
6420 ])
6421 );
6422
6423 let buffer_4 = project
6424 .update(cx, |project, cx| {
6425 project.open_local_buffer(path!("/dir/four.rs"), cx)
6426 })
6427 .await
6428 .unwrap();
6429 buffer_4.update(cx, |buffer, cx| {
6430 let text = "two::TWO";
6431 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6432 });
6433
6434 assert_eq!(
6435 search(
6436 &project,
6437 SearchQuery::text(
6438 "TWO",
6439 false,
6440 true,
6441 false,
6442 Default::default(),
6443 Default::default(),
6444 false,
6445 None,
6446 )
6447 .unwrap(),
6448 cx
6449 )
6450 .await
6451 .unwrap(),
6452 HashMap::from_iter([
6453 (path!("dir/two.rs").to_string(), vec![6..9]),
6454 (path!("dir/three.rs").to_string(), vec![37..40]),
6455 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6456 ])
6457 );
6458}
6459
6460#[gpui::test]
6461async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6462 init_test(cx);
6463
6464 let search_query = "file";
6465
6466 let fs = FakeFs::new(cx.executor());
6467 fs.insert_tree(
6468 path!("/dir"),
6469 json!({
6470 "one.rs": r#"// Rust file one"#,
6471 "one.ts": r#"// TypeScript file one"#,
6472 "two.rs": r#"// Rust file two"#,
6473 "two.ts": r#"// TypeScript file two"#,
6474 }),
6475 )
6476 .await;
6477 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6478
6479 assert!(
6480 search(
6481 &project,
6482 SearchQuery::text(
6483 search_query,
6484 false,
6485 true,
6486 false,
6487 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6488 Default::default(),
6489 false,
6490 None
6491 )
6492 .unwrap(),
6493 cx
6494 )
6495 .await
6496 .unwrap()
6497 .is_empty(),
6498 "If no inclusions match, no files should be returned"
6499 );
6500
6501 assert_eq!(
6502 search(
6503 &project,
6504 SearchQuery::text(
6505 search_query,
6506 false,
6507 true,
6508 false,
6509 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6510 Default::default(),
6511 false,
6512 None
6513 )
6514 .unwrap(),
6515 cx
6516 )
6517 .await
6518 .unwrap(),
6519 HashMap::from_iter([
6520 (path!("dir/one.rs").to_string(), vec![8..12]),
6521 (path!("dir/two.rs").to_string(), vec![8..12]),
6522 ]),
6523 "Rust only search should give only Rust files"
6524 );
6525
6526 assert_eq!(
6527 search(
6528 &project,
6529 SearchQuery::text(
6530 search_query,
6531 false,
6532 true,
6533 false,
6534 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6535 .unwrap(),
6536 Default::default(),
6537 false,
6538 None,
6539 )
6540 .unwrap(),
6541 cx
6542 )
6543 .await
6544 .unwrap(),
6545 HashMap::from_iter([
6546 (path!("dir/one.ts").to_string(), vec![14..18]),
6547 (path!("dir/two.ts").to_string(), vec![14..18]),
6548 ]),
6549 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6550 );
6551
6552 assert_eq!(
6553 search(
6554 &project,
6555 SearchQuery::text(
6556 search_query,
6557 false,
6558 true,
6559 false,
6560 PathMatcher::new(
6561 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6562 PathStyle::local()
6563 )
6564 .unwrap(),
6565 Default::default(),
6566 false,
6567 None,
6568 )
6569 .unwrap(),
6570 cx
6571 )
6572 .await
6573 .unwrap(),
6574 HashMap::from_iter([
6575 (path!("dir/two.ts").to_string(), vec![14..18]),
6576 (path!("dir/one.rs").to_string(), vec![8..12]),
6577 (path!("dir/one.ts").to_string(), vec![14..18]),
6578 (path!("dir/two.rs").to_string(), vec![8..12]),
6579 ]),
6580 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6581 );
6582}
6583
6584#[gpui::test]
6585async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6586 init_test(cx);
6587
6588 let search_query = "file";
6589
6590 let fs = FakeFs::new(cx.executor());
6591 fs.insert_tree(
6592 path!("/dir"),
6593 json!({
6594 "one.rs": r#"// Rust file one"#,
6595 "one.ts": r#"// TypeScript file one"#,
6596 "two.rs": r#"// Rust file two"#,
6597 "two.ts": r#"// TypeScript file two"#,
6598 }),
6599 )
6600 .await;
6601 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6602
6603 assert_eq!(
6604 search(
6605 &project,
6606 SearchQuery::text(
6607 search_query,
6608 false,
6609 true,
6610 false,
6611 Default::default(),
6612 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6613 false,
6614 None,
6615 )
6616 .unwrap(),
6617 cx
6618 )
6619 .await
6620 .unwrap(),
6621 HashMap::from_iter([
6622 (path!("dir/one.rs").to_string(), vec![8..12]),
6623 (path!("dir/one.ts").to_string(), vec![14..18]),
6624 (path!("dir/two.rs").to_string(), vec![8..12]),
6625 (path!("dir/two.ts").to_string(), vec![14..18]),
6626 ]),
6627 "If no exclusions match, all files should be returned"
6628 );
6629
6630 assert_eq!(
6631 search(
6632 &project,
6633 SearchQuery::text(
6634 search_query,
6635 false,
6636 true,
6637 false,
6638 Default::default(),
6639 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6640 false,
6641 None,
6642 )
6643 .unwrap(),
6644 cx
6645 )
6646 .await
6647 .unwrap(),
6648 HashMap::from_iter([
6649 (path!("dir/one.ts").to_string(), vec![14..18]),
6650 (path!("dir/two.ts").to_string(), vec![14..18]),
6651 ]),
6652 "Rust exclusion search should give only TypeScript files"
6653 );
6654
6655 assert_eq!(
6656 search(
6657 &project,
6658 SearchQuery::text(
6659 search_query,
6660 false,
6661 true,
6662 false,
6663 Default::default(),
6664 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6665 .unwrap(),
6666 false,
6667 None,
6668 )
6669 .unwrap(),
6670 cx
6671 )
6672 .await
6673 .unwrap(),
6674 HashMap::from_iter([
6675 (path!("dir/one.rs").to_string(), vec![8..12]),
6676 (path!("dir/two.rs").to_string(), vec![8..12]),
6677 ]),
6678 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6679 );
6680
6681 assert!(
6682 search(
6683 &project,
6684 SearchQuery::text(
6685 search_query,
6686 false,
6687 true,
6688 false,
6689 Default::default(),
6690 PathMatcher::new(
6691 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6692 PathStyle::local(),
6693 )
6694 .unwrap(),
6695 false,
6696 None,
6697 )
6698 .unwrap(),
6699 cx
6700 )
6701 .await
6702 .unwrap()
6703 .is_empty(),
6704 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6705 );
6706}
6707
6708#[gpui::test]
6709async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6710 init_test(cx);
6711
6712 let search_query = "file";
6713
6714 let fs = FakeFs::new(cx.executor());
6715 fs.insert_tree(
6716 path!("/dir"),
6717 json!({
6718 "one.rs": r#"// Rust file one"#,
6719 "one.ts": r#"// TypeScript file one"#,
6720 "two.rs": r#"// Rust file two"#,
6721 "two.ts": r#"// TypeScript file two"#,
6722 }),
6723 )
6724 .await;
6725
6726 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6727 let path_style = PathStyle::local();
6728 let _buffer = project.update(cx, |project, cx| {
6729 project.create_local_buffer("file", None, false, cx)
6730 });
6731
6732 assert_eq!(
6733 search(
6734 &project,
6735 SearchQuery::text(
6736 search_query,
6737 false,
6738 true,
6739 false,
6740 Default::default(),
6741 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6742 false,
6743 None,
6744 )
6745 .unwrap(),
6746 cx
6747 )
6748 .await
6749 .unwrap(),
6750 HashMap::from_iter([
6751 (path!("dir/one.rs").to_string(), vec![8..12]),
6752 (path!("dir/one.ts").to_string(), vec![14..18]),
6753 (path!("dir/two.rs").to_string(), vec![8..12]),
6754 (path!("dir/two.ts").to_string(), vec![14..18]),
6755 ]),
6756 "If no exclusions match, all files should be returned"
6757 );
6758
6759 assert_eq!(
6760 search(
6761 &project,
6762 SearchQuery::text(
6763 search_query,
6764 false,
6765 true,
6766 false,
6767 Default::default(),
6768 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6769 false,
6770 None,
6771 )
6772 .unwrap(),
6773 cx
6774 )
6775 .await
6776 .unwrap(),
6777 HashMap::from_iter([
6778 (path!("dir/one.ts").to_string(), vec![14..18]),
6779 (path!("dir/two.ts").to_string(), vec![14..18]),
6780 ]),
6781 "Rust exclusion search should give only TypeScript files"
6782 );
6783
6784 assert_eq!(
6785 search(
6786 &project,
6787 SearchQuery::text(
6788 search_query,
6789 false,
6790 true,
6791 false,
6792 Default::default(),
6793 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6794 false,
6795 None,
6796 )
6797 .unwrap(),
6798 cx
6799 )
6800 .await
6801 .unwrap(),
6802 HashMap::from_iter([
6803 (path!("dir/one.rs").to_string(), vec![8..12]),
6804 (path!("dir/two.rs").to_string(), vec![8..12]),
6805 ]),
6806 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6807 );
6808
6809 assert!(
6810 search(
6811 &project,
6812 SearchQuery::text(
6813 search_query,
6814 false,
6815 true,
6816 false,
6817 Default::default(),
6818 PathMatcher::new(
6819 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6820 PathStyle::local(),
6821 )
6822 .unwrap(),
6823 false,
6824 None,
6825 )
6826 .unwrap(),
6827 cx
6828 )
6829 .await
6830 .unwrap()
6831 .is_empty(),
6832 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6833 );
6834}
6835
6836#[gpui::test]
6837async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6838 init_test(cx);
6839
6840 let search_query = "file";
6841
6842 let fs = FakeFs::new(cx.executor());
6843 fs.insert_tree(
6844 path!("/dir"),
6845 json!({
6846 "one.rs": r#"// Rust file one"#,
6847 "one.ts": r#"// TypeScript file one"#,
6848 "two.rs": r#"// Rust file two"#,
6849 "two.ts": r#"// TypeScript file two"#,
6850 }),
6851 )
6852 .await;
6853 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6854 assert!(
6855 search(
6856 &project,
6857 SearchQuery::text(
6858 search_query,
6859 false,
6860 true,
6861 false,
6862 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6863 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6864 false,
6865 None,
6866 )
6867 .unwrap(),
6868 cx
6869 )
6870 .await
6871 .unwrap()
6872 .is_empty(),
6873 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6874 );
6875
6876 assert!(
6877 search(
6878 &project,
6879 SearchQuery::text(
6880 search_query,
6881 false,
6882 true,
6883 false,
6884 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6885 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6886 false,
6887 None,
6888 )
6889 .unwrap(),
6890 cx
6891 )
6892 .await
6893 .unwrap()
6894 .is_empty(),
6895 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6896 );
6897
6898 assert!(
6899 search(
6900 &project,
6901 SearchQuery::text(
6902 search_query,
6903 false,
6904 true,
6905 false,
6906 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6907 .unwrap(),
6908 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6909 .unwrap(),
6910 false,
6911 None,
6912 )
6913 .unwrap(),
6914 cx
6915 )
6916 .await
6917 .unwrap()
6918 .is_empty(),
6919 "Non-matching inclusions and exclusions should not change that."
6920 );
6921
6922 assert_eq!(
6923 search(
6924 &project,
6925 SearchQuery::text(
6926 search_query,
6927 false,
6928 true,
6929 false,
6930 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6931 .unwrap(),
6932 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6933 .unwrap(),
6934 false,
6935 None,
6936 )
6937 .unwrap(),
6938 cx
6939 )
6940 .await
6941 .unwrap(),
6942 HashMap::from_iter([
6943 (path!("dir/one.ts").to_string(), vec![14..18]),
6944 (path!("dir/two.ts").to_string(), vec![14..18]),
6945 ]),
6946 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6947 );
6948}
6949
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identically named files, so inclusion globs must be able
    // to discriminate by worktree name as well as by extension.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Include glob prefixed with the worktree name selects files from that worktree only.
    // NOTE(review): the `true` before `None` presumably makes include globs match
    // against worktree-prefixed paths — confirm against `SearchQuery::text`.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same glob shape targeting the other worktree selects only that worktree's file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An extension-only glob (with the flag off) matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7048
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo whose `.gitignore` hides `target/` and `node_modules/`; every ignored
    // file still contains the query string "key".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search (4th argument false): ignored directories are skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created before each query, presumably so
    // state from the previous search/scan does not affect the next one — confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // With include-ignored (4th argument true) every file is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions and exclusions still apply on top of include-ignored:
    // only the prettier directory is searched, and its `.ts` file is excluded.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7173
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Cyrillic text: each of the 6 letters in "привет"/"ПРИВЕТ" is 2 bytes in
    // UTF-8, so every match range below spans 12 bytes.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search stays a plain text query.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Only the lowercase occurrences match: byte 17 in one.rs (after "// ПРИВЕТ? "),
    // byte 3 in three.rs (after "// ").
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive non-ASCII search is implemented via a regex query
    // (asserted below), and matches both cases in every file.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is a literal character in a text search (13-byte match),
    // so only two.rs matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7256
7257#[gpui::test]
7258async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7259 init_test(cx);
7260
7261 let fs = FakeFs::new(cx.executor());
7262 fs.insert_tree(
7263 "/one/two",
7264 json!({
7265 "three": {
7266 "a.txt": "",
7267 "four": {}
7268 },
7269 "c.rs": ""
7270 }),
7271 )
7272 .await;
7273
7274 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7275 project
7276 .update(cx, |project, cx| {
7277 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7278 project.create_entry((id, rel_path("b..")), true, cx)
7279 })
7280 .await
7281 .unwrap()
7282 .into_included()
7283 .unwrap();
7284
7285 assert_eq!(
7286 fs.paths(true),
7287 vec![
7288 PathBuf::from(path!("/")),
7289 PathBuf::from(path!("/one")),
7290 PathBuf::from(path!("/one/two")),
7291 PathBuf::from(path!("/one/two/c.rs")),
7292 PathBuf::from(path!("/one/two/three")),
7293 PathBuf::from(path!("/one/two/three/a.txt")),
7294 PathBuf::from(path!("/one/two/three/b..")),
7295 PathBuf::from(path!("/one/two/three/four")),
7296 ]
7297 );
7298}
7299
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: the first three advertise hover
    // capability, the fourth does not and must never be queried.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all the registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start, then install a per-server hover handler:
    // two servers answer with a hover, ESLint answers `None`, and the
    // capability-less server panics if it ever receives a request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover request, then drain every handler stream to prove each
    // capable server actually received the request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned hover content contribute results;
    // ESLint's `None` is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7454
7455#[gpui::test]
7456async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7457 init_test(cx);
7458
7459 let fs = FakeFs::new(cx.executor());
7460 fs.insert_tree(
7461 path!("/dir"),
7462 json!({
7463 "a.ts": "a",
7464 }),
7465 )
7466 .await;
7467
7468 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7469
7470 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7471 language_registry.add(typescript_lang());
7472 let mut fake_language_servers = language_registry.register_fake_lsp(
7473 "TypeScript",
7474 FakeLspAdapter {
7475 capabilities: lsp::ServerCapabilities {
7476 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7477 ..lsp::ServerCapabilities::default()
7478 },
7479 ..FakeLspAdapter::default()
7480 },
7481 );
7482
7483 let (buffer, _handle) = project
7484 .update(cx, |p, cx| {
7485 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7486 })
7487 .await
7488 .unwrap();
7489 cx.executor().run_until_parked();
7490
7491 let fake_server = fake_language_servers
7492 .next()
7493 .await
7494 .expect("failed to get the language server");
7495
7496 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7497 move |_, _| async move {
7498 Ok(Some(lsp::Hover {
7499 contents: lsp::HoverContents::Array(vec![
7500 lsp::MarkedString::String("".to_string()),
7501 lsp::MarkedString::String(" ".to_string()),
7502 lsp::MarkedString::String("\n\n\n".to_string()),
7503 ]),
7504 range: None,
7505 }))
7506 },
7507 );
7508
7509 let hover_task = project.update(cx, |project, cx| {
7510 project.hover(&buffer, Point::new(0, 0), cx)
7511 });
7512 let () = request_handled
7513 .next()
7514 .await
7515 .expect("All hover requests should have been triggered");
7516 assert_eq!(
7517 Vec::<String>::new(),
7518 hover_task
7519 .await
7520 .into_iter()
7521 .flatten()
7522 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7523 .sorted()
7524 .collect::<Vec<_>>(),
7525 "Empty hover parts should be ignored"
7526 );
7527}
7528
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake server that advertises the code-action capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds:
    // `source.organizeImports` and `source.fixAll`.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions restricted to the organize-imports kind only.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The `fix code` action must be filtered out by the kind restriction.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7607
7608#[gpui::test]
7609async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7610 init_test(cx);
7611
7612 let fs = FakeFs::new(cx.executor());
7613 fs.insert_tree(
7614 path!("/dir"),
7615 json!({
7616 "a.tsx": "a",
7617 }),
7618 )
7619 .await;
7620
7621 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7622
7623 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7624 language_registry.add(tsx_lang());
7625 let language_server_names = [
7626 "TypeScriptServer",
7627 "TailwindServer",
7628 "ESLintServer",
7629 "NoActionsCapabilitiesServer",
7630 ];
7631
7632 let mut language_server_rxs = [
7633 language_registry.register_fake_lsp(
7634 "tsx",
7635 FakeLspAdapter {
7636 name: language_server_names[0],
7637 capabilities: lsp::ServerCapabilities {
7638 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7639 ..lsp::ServerCapabilities::default()
7640 },
7641 ..FakeLspAdapter::default()
7642 },
7643 ),
7644 language_registry.register_fake_lsp(
7645 "tsx",
7646 FakeLspAdapter {
7647 name: language_server_names[1],
7648 capabilities: lsp::ServerCapabilities {
7649 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7650 ..lsp::ServerCapabilities::default()
7651 },
7652 ..FakeLspAdapter::default()
7653 },
7654 ),
7655 language_registry.register_fake_lsp(
7656 "tsx",
7657 FakeLspAdapter {
7658 name: language_server_names[2],
7659 capabilities: lsp::ServerCapabilities {
7660 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7661 ..lsp::ServerCapabilities::default()
7662 },
7663 ..FakeLspAdapter::default()
7664 },
7665 ),
7666 language_registry.register_fake_lsp(
7667 "tsx",
7668 FakeLspAdapter {
7669 name: language_server_names[3],
7670 capabilities: lsp::ServerCapabilities {
7671 code_action_provider: None,
7672 ..lsp::ServerCapabilities::default()
7673 },
7674 ..FakeLspAdapter::default()
7675 },
7676 ),
7677 ];
7678
7679 let (buffer, _handle) = project
7680 .update(cx, |p, cx| {
7681 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7682 })
7683 .await
7684 .unwrap();
7685 cx.executor().run_until_parked();
7686
7687 let mut servers_with_actions_requests = HashMap::default();
7688 for i in 0..language_server_names.len() {
7689 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7690 panic!(
7691 "Failed to get language server #{i} with name {}",
7692 &language_server_names[i]
7693 )
7694 });
7695 let new_server_name = new_server.server.name();
7696
7697 assert!(
7698 !servers_with_actions_requests.contains_key(&new_server_name),
7699 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7700 );
7701 match new_server_name.0.as_ref() {
7702 "TailwindServer" | "TypeScriptServer" => {
7703 servers_with_actions_requests.insert(
7704 new_server_name.clone(),
7705 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7706 move |_, _| {
7707 let name = new_server_name.clone();
7708 async move {
7709 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7710 lsp::CodeAction {
7711 title: format!("{name} code action"),
7712 ..lsp::CodeAction::default()
7713 },
7714 )]))
7715 }
7716 },
7717 ),
7718 );
7719 }
7720 "ESLintServer" => {
7721 servers_with_actions_requests.insert(
7722 new_server_name,
7723 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7724 |_, _| async move { Ok(None) },
7725 ),
7726 );
7727 }
7728 "NoActionsCapabilitiesServer" => {
7729 let _never_handled = new_server
7730 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7731 panic!(
7732 "Should not call for code actions server with no corresponding capabilities"
7733 )
7734 });
7735 }
7736 unexpected => panic!("Unexpected server name: {unexpected}"),
7737 }
7738 }
7739
7740 let code_actions_task = project.update(cx, |project, cx| {
7741 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7742 });
7743
7744 // cx.run_until_parked();
7745 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7746 |mut code_actions_request| async move {
7747 code_actions_request
7748 .next()
7749 .await
7750 .expect("All code actions requests should have been triggered")
7751 },
7752 ))
7753 .await;
7754 assert_eq!(
7755 vec!["TailwindServer code action", "TypeScriptServer code action"],
7756 code_actions_task
7757 .await
7758 .unwrap()
7759 .unwrap()
7760 .into_iter()
7761 .map(|code_action| code_action.lsp_action.title().to_owned())
7762 .sorted()
7763 .collect::<Vec<_>>(),
7764 "Should receive code actions responses from all related servers with hover capabilities"
7765 );
7766}
7767
7768#[gpui::test]
7769async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7770 init_test(cx);
7771
7772 let fs = FakeFs::new(cx.executor());
7773 fs.insert_tree(
7774 "/dir",
7775 json!({
7776 "a.rs": "let a = 1;",
7777 "b.rs": "let b = 2;",
7778 "c.rs": "let c = 2;",
7779 }),
7780 )
7781 .await;
7782
7783 let project = Project::test(
7784 fs,
7785 [
7786 "/dir/a.rs".as_ref(),
7787 "/dir/b.rs".as_ref(),
7788 "/dir/c.rs".as_ref(),
7789 ],
7790 cx,
7791 )
7792 .await;
7793
7794 // check the initial state and get the worktrees
7795 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7796 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7797 assert_eq!(worktrees.len(), 3);
7798
7799 let worktree_a = worktrees[0].read(cx);
7800 let worktree_b = worktrees[1].read(cx);
7801 let worktree_c = worktrees[2].read(cx);
7802
7803 // check they start in the right order
7804 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7805 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7806 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7807
7808 (
7809 worktrees[0].clone(),
7810 worktrees[1].clone(),
7811 worktrees[2].clone(),
7812 )
7813 });
7814
7815 // move first worktree to after the second
7816 // [a, b, c] -> [b, a, c]
7817 project
7818 .update(cx, |project, cx| {
7819 let first = worktree_a.read(cx);
7820 let second = worktree_b.read(cx);
7821 project.move_worktree(first.id(), second.id(), cx)
7822 })
7823 .expect("moving first after second");
7824
7825 // check the state after moving
7826 project.update(cx, |project, cx| {
7827 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7828 assert_eq!(worktrees.len(), 3);
7829
7830 let first = worktrees[0].read(cx);
7831 let second = worktrees[1].read(cx);
7832 let third = worktrees[2].read(cx);
7833
7834 // check they are now in the right order
7835 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7836 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7837 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7838 });
7839
7840 // move the second worktree to before the first
7841 // [b, a, c] -> [a, b, c]
7842 project
7843 .update(cx, |project, cx| {
7844 let second = worktree_a.read(cx);
7845 let first = worktree_b.read(cx);
7846 project.move_worktree(first.id(), second.id(), cx)
7847 })
7848 .expect("moving second before first");
7849
7850 // check the state after moving
7851 project.update(cx, |project, cx| {
7852 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7853 assert_eq!(worktrees.len(), 3);
7854
7855 let first = worktrees[0].read(cx);
7856 let second = worktrees[1].read(cx);
7857 let third = worktrees[2].read(cx);
7858
7859 // check they are now in the right order
7860 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7861 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7862 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7863 });
7864
7865 // move the second worktree to after the third
7866 // [a, b, c] -> [a, c, b]
7867 project
7868 .update(cx, |project, cx| {
7869 let second = worktree_b.read(cx);
7870 let third = worktree_c.read(cx);
7871 project.move_worktree(second.id(), third.id(), cx)
7872 })
7873 .expect("moving second after third");
7874
7875 // check the state after moving
7876 project.update(cx, |project, cx| {
7877 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7878 assert_eq!(worktrees.len(), 3);
7879
7880 let first = worktrees[0].read(cx);
7881 let second = worktrees[1].read(cx);
7882 let third = worktrees[2].read(cx);
7883
7884 // check they are now in the right order
7885 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7886 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7887 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7888 });
7889
7890 // move the third worktree to before the second
7891 // [a, c, b] -> [a, b, c]
7892 project
7893 .update(cx, |project, cx| {
7894 let third = worktree_c.read(cx);
7895 let second = worktree_b.read(cx);
7896 project.move_worktree(third.id(), second.id(), cx)
7897 })
7898 .expect("moving third before second");
7899
7900 // check the state after moving
7901 project.update(cx, |project, cx| {
7902 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7903 assert_eq!(worktrees.len(), 3);
7904
7905 let first = worktrees[0].read(cx);
7906 let second = worktrees[1].read(cx);
7907 let third = worktrees[2].read(cx);
7908
7909 // check they are now in the right order
7910 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7911 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7912 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7913 });
7914
7915 // move the first worktree to after the third
7916 // [a, b, c] -> [b, c, a]
7917 project
7918 .update(cx, |project, cx| {
7919 let first = worktree_a.read(cx);
7920 let third = worktree_c.read(cx);
7921 project.move_worktree(first.id(), third.id(), cx)
7922 })
7923 .expect("moving first after third");
7924
7925 // check the state after moving
7926 project.update(cx, |project, cx| {
7927 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7928 assert_eq!(worktrees.len(), 3);
7929
7930 let first = worktrees[0].read(cx);
7931 let second = worktrees[1].read(cx);
7932 let third = worktrees[2].read(cx);
7933
7934 // check they are now in the right order
7935 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7936 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7937 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7938 });
7939
7940 // move the third worktree to before the first
7941 // [b, c, a] -> [a, b, c]
7942 project
7943 .update(cx, |project, cx| {
7944 let third = worktree_a.read(cx);
7945 let first = worktree_b.read(cx);
7946 project.move_worktree(third.id(), first.id(), cx)
7947 })
7948 .expect("moving third before first");
7949
7950 // check the state after moving
7951 project.update(cx, |project, cx| {
7952 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7953 assert_eq!(worktrees.len(), 3);
7954
7955 let first = worktrees[0].read(cx);
7956 let second = worktrees[1].read(cx);
7957 let third = worktrees[2].read(cx);
7958
7959 // check they are now in the right order
7960 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7961 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7962 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7963 });
7964}
7965
7966#[gpui::test]
7967async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7968 init_test(cx);
7969
7970 let staged_contents = r#"
7971 fn main() {
7972 println!("hello world");
7973 }
7974 "#
7975 .unindent();
7976 let file_contents = r#"
7977 // print goodbye
7978 fn main() {
7979 println!("goodbye world");
7980 }
7981 "#
7982 .unindent();
7983
7984 let fs = FakeFs::new(cx.background_executor.clone());
7985 fs.insert_tree(
7986 "/dir",
7987 json!({
7988 ".git": {},
7989 "src": {
7990 "main.rs": file_contents,
7991 }
7992 }),
7993 )
7994 .await;
7995
7996 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7997
7998 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7999
8000 let buffer = project
8001 .update(cx, |project, cx| {
8002 project.open_local_buffer("/dir/src/main.rs", cx)
8003 })
8004 .await
8005 .unwrap();
8006 let unstaged_diff = project
8007 .update(cx, |project, cx| {
8008 project.open_unstaged_diff(buffer.clone(), cx)
8009 })
8010 .await
8011 .unwrap();
8012
8013 cx.run_until_parked();
8014 unstaged_diff.update(cx, |unstaged_diff, cx| {
8015 let snapshot = buffer.read(cx).snapshot();
8016 assert_hunks(
8017 unstaged_diff.snapshot(cx).hunks(&snapshot),
8018 &snapshot,
8019 &unstaged_diff.base_text_string(cx).unwrap(),
8020 &[
8021 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8022 (
8023 2..3,
8024 " println!(\"hello world\");\n",
8025 " println!(\"goodbye world\");\n",
8026 DiffHunkStatus::modified_none(),
8027 ),
8028 ],
8029 );
8030 });
8031
8032 let staged_contents = r#"
8033 // print goodbye
8034 fn main() {
8035 }
8036 "#
8037 .unindent();
8038
8039 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8040
8041 cx.run_until_parked();
8042 unstaged_diff.update(cx, |unstaged_diff, cx| {
8043 let snapshot = buffer.read(cx).snapshot();
8044 assert_hunks(
8045 unstaged_diff
8046 .snapshot(cx)
8047 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8048 &snapshot,
8049 &unstaged_diff.base_text(cx).text(),
8050 &[(
8051 2..3,
8052 "",
8053 " println!(\"goodbye world\");\n",
8054 DiffHunkStatus::added_none(),
8055 )],
8056 );
8057 });
8058}
8059
8060#[gpui::test]
8061async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8062 init_test(cx);
8063
8064 let committed_contents = r#"
8065 fn main() {
8066 println!("hello world");
8067 }
8068 "#
8069 .unindent();
8070 let staged_contents = r#"
8071 fn main() {
8072 println!("goodbye world");
8073 }
8074 "#
8075 .unindent();
8076 let file_contents = r#"
8077 // print goodbye
8078 fn main() {
8079 println!("goodbye world");
8080 }
8081 "#
8082 .unindent();
8083
8084 let fs = FakeFs::new(cx.background_executor.clone());
8085 fs.insert_tree(
8086 "/dir",
8087 json!({
8088 ".git": {},
8089 "src": {
8090 "modification.rs": file_contents,
8091 }
8092 }),
8093 )
8094 .await;
8095
8096 fs.set_head_for_repo(
8097 Path::new("/dir/.git"),
8098 &[
8099 ("src/modification.rs", committed_contents),
8100 ("src/deletion.rs", "// the-deleted-contents\n".into()),
8101 ],
8102 "deadbeef",
8103 );
8104 fs.set_index_for_repo(
8105 Path::new("/dir/.git"),
8106 &[
8107 ("src/modification.rs", staged_contents),
8108 ("src/deletion.rs", "// the-deleted-contents\n".into()),
8109 ],
8110 );
8111
8112 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8113 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8114 let language = rust_lang();
8115 language_registry.add(language.clone());
8116
8117 let buffer_1 = project
8118 .update(cx, |project, cx| {
8119 project.open_local_buffer("/dir/src/modification.rs", cx)
8120 })
8121 .await
8122 .unwrap();
8123 let diff_1 = project
8124 .update(cx, |project, cx| {
8125 project.open_uncommitted_diff(buffer_1.clone(), cx)
8126 })
8127 .await
8128 .unwrap();
8129 diff_1.read_with(cx, |diff, cx| {
8130 assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
8131 });
8132 cx.run_until_parked();
8133 diff_1.update(cx, |diff, cx| {
8134 let snapshot = buffer_1.read(cx).snapshot();
8135 assert_hunks(
8136 diff.snapshot(cx)
8137 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8138 &snapshot,
8139 &diff.base_text_string(cx).unwrap(),
8140 &[
8141 (
8142 0..1,
8143 "",
8144 "// print goodbye\n",
8145 DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
8146 ),
8147 (
8148 2..3,
8149 " println!(\"hello world\");\n",
8150 " println!(\"goodbye world\");\n",
8151 DiffHunkStatus::modified_none(),
8152 ),
8153 ],
8154 );
8155 });
8156
8157 // Reset HEAD to a version that differs from both the buffer and the index.
8158 let committed_contents = r#"
8159 // print goodbye
8160 fn main() {
8161 }
8162 "#
8163 .unindent();
8164 fs.set_head_for_repo(
8165 Path::new("/dir/.git"),
8166 &[
8167 ("src/modification.rs", committed_contents.clone()),
8168 ("src/deletion.rs", "// the-deleted-contents\n".into()),
8169 ],
8170 "deadbeef",
8171 );
8172
8173 // Buffer now has an unstaged hunk.
8174 cx.run_until_parked();
8175 diff_1.update(cx, |diff, cx| {
8176 let snapshot = buffer_1.read(cx).snapshot();
8177 assert_hunks(
8178 diff.snapshot(cx)
8179 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8180 &snapshot,
8181 &diff.base_text(cx).text(),
8182 &[(
8183 2..3,
8184 "",
8185 " println!(\"goodbye world\");\n",
8186 DiffHunkStatus::added_none(),
8187 )],
8188 );
8189 });
8190
8191 // Open a buffer for a file that's been deleted.
8192 let buffer_2 = project
8193 .update(cx, |project, cx| {
8194 project.open_local_buffer("/dir/src/deletion.rs", cx)
8195 })
8196 .await
8197 .unwrap();
8198 let diff_2 = project
8199 .update(cx, |project, cx| {
8200 project.open_uncommitted_diff(buffer_2.clone(), cx)
8201 })
8202 .await
8203 .unwrap();
8204 cx.run_until_parked();
8205 diff_2.update(cx, |diff, cx| {
8206 let snapshot = buffer_2.read(cx).snapshot();
8207 assert_hunks(
8208 diff.snapshot(cx)
8209 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8210 &snapshot,
8211 &diff.base_text_string(cx).unwrap(),
8212 &[(
8213 0..0,
8214 "// the-deleted-contents\n",
8215 "",
8216 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
8217 )],
8218 );
8219 });
8220
8221 // Stage the deletion of this file
8222 fs.set_index_for_repo(
8223 Path::new("/dir/.git"),
8224 &[("src/modification.rs", committed_contents.clone())],
8225 );
8226 cx.run_until_parked();
8227 diff_2.update(cx, |diff, cx| {
8228 let snapshot = buffer_2.read(cx).snapshot();
8229 assert_hunks(
8230 diff.snapshot(cx)
8231 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8232 &snapshot,
8233 &diff.base_text_string(cx).unwrap(),
8234 &[(
8235 0..0,
8236 "// the-deleted-contents\n",
8237 "",
8238 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
8239 )],
8240 );
8241 });
8242}
8243
8244#[gpui::test]
8245async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
8246 use DiffHunkSecondaryStatus::*;
8247 init_test(cx);
8248
8249 let committed_contents = r#"
8250 zero
8251 one
8252 two
8253 three
8254 four
8255 five
8256 "#
8257 .unindent();
8258 let file_contents = r#"
8259 one
8260 TWO
8261 three
8262 FOUR
8263 five
8264 "#
8265 .unindent();
8266
8267 let fs = FakeFs::new(cx.background_executor.clone());
8268 fs.insert_tree(
8269 "/dir",
8270 json!({
8271 ".git": {},
8272 "file.txt": file_contents.clone()
8273 }),
8274 )
8275 .await;
8276
8277 fs.set_head_and_index_for_repo(
8278 path!("/dir/.git").as_ref(),
8279 &[("file.txt", committed_contents.clone())],
8280 );
8281
8282 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8283
8284 let buffer = project
8285 .update(cx, |project, cx| {
8286 project.open_local_buffer("/dir/file.txt", cx)
8287 })
8288 .await
8289 .unwrap();
8290 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8291 let uncommitted_diff = project
8292 .update(cx, |project, cx| {
8293 project.open_uncommitted_diff(buffer.clone(), cx)
8294 })
8295 .await
8296 .unwrap();
8297 let mut diff_events = cx.events(&uncommitted_diff);
8298
8299 // The hunks are initially unstaged.
8300 uncommitted_diff.read_with(cx, |diff, cx| {
8301 assert_hunks(
8302 diff.snapshot(cx).hunks(&snapshot),
8303 &snapshot,
8304 &diff.base_text_string(cx).unwrap(),
8305 &[
8306 (
8307 0..0,
8308 "zero\n",
8309 "",
8310 DiffHunkStatus::deleted(HasSecondaryHunk),
8311 ),
8312 (
8313 1..2,
8314 "two\n",
8315 "TWO\n",
8316 DiffHunkStatus::modified(HasSecondaryHunk),
8317 ),
8318 (
8319 3..4,
8320 "four\n",
8321 "FOUR\n",
8322 DiffHunkStatus::modified(HasSecondaryHunk),
8323 ),
8324 ],
8325 );
8326 });
8327
8328 // Stage a hunk. It appears as optimistically staged.
8329 uncommitted_diff.update(cx, |diff, cx| {
8330 let range =
8331 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
8332 let hunks = diff
8333 .snapshot(cx)
8334 .hunks_intersecting_range(range, &snapshot)
8335 .collect::<Vec<_>>();
8336 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8337
8338 assert_hunks(
8339 diff.snapshot(cx).hunks(&snapshot),
8340 &snapshot,
8341 &diff.base_text_string(cx).unwrap(),
8342 &[
8343 (
8344 0..0,
8345 "zero\n",
8346 "",
8347 DiffHunkStatus::deleted(HasSecondaryHunk),
8348 ),
8349 (
8350 1..2,
8351 "two\n",
8352 "TWO\n",
8353 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8354 ),
8355 (
8356 3..4,
8357 "four\n",
8358 "FOUR\n",
8359 DiffHunkStatus::modified(HasSecondaryHunk),
8360 ),
8361 ],
8362 );
8363 });
8364
8365 // The diff emits a change event for the range of the staged hunk.
8366 assert!(matches!(
8367 diff_events.next().await.unwrap(),
8368 BufferDiffEvent::HunksStagedOrUnstaged(_)
8369 ));
8370 let event = diff_events.next().await.unwrap();
8371 if let BufferDiffEvent::DiffChanged(DiffChanged {
8372 changed_range: Some(changed_range),
8373 base_text_changed_range: _,
8374 extended_range: _,
8375 }) = event
8376 {
8377 let changed_range = changed_range.to_point(&snapshot);
8378 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
8379 } else {
8380 panic!("Unexpected event {event:?}");
8381 }
8382
8383 // When the write to the index completes, it appears as staged.
8384 cx.run_until_parked();
8385 uncommitted_diff.update(cx, |diff, cx| {
8386 assert_hunks(
8387 diff.snapshot(cx).hunks(&snapshot),
8388 &snapshot,
8389 &diff.base_text_string(cx).unwrap(),
8390 &[
8391 (
8392 0..0,
8393 "zero\n",
8394 "",
8395 DiffHunkStatus::deleted(HasSecondaryHunk),
8396 ),
8397 (
8398 1..2,
8399 "two\n",
8400 "TWO\n",
8401 DiffHunkStatus::modified(NoSecondaryHunk),
8402 ),
8403 (
8404 3..4,
8405 "four\n",
8406 "FOUR\n",
8407 DiffHunkStatus::modified(HasSecondaryHunk),
8408 ),
8409 ],
8410 );
8411 });
8412
8413 // The diff emits a change event for the changed index text.
8414 let event = diff_events.next().await.unwrap();
8415 if let BufferDiffEvent::DiffChanged(DiffChanged {
8416 changed_range: Some(changed_range),
8417 base_text_changed_range: _,
8418 extended_range: _,
8419 }) = event
8420 {
8421 let changed_range = changed_range.to_point(&snapshot);
8422 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
8423 } else {
8424 panic!("Unexpected event {event:?}");
8425 }
8426
8427 // Simulate a problem writing to the git index.
8428 fs.set_error_message_for_index_write(
8429 "/dir/.git".as_ref(),
8430 Some("failed to write git index".into()),
8431 );
8432
8433 // Stage another hunk.
8434 uncommitted_diff.update(cx, |diff, cx| {
8435 let range =
8436 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
8437 let hunks = diff
8438 .snapshot(cx)
8439 .hunks_intersecting_range(range, &snapshot)
8440 .collect::<Vec<_>>();
8441 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8442
8443 assert_hunks(
8444 diff.snapshot(cx).hunks(&snapshot),
8445 &snapshot,
8446 &diff.base_text_string(cx).unwrap(),
8447 &[
8448 (
8449 0..0,
8450 "zero\n",
8451 "",
8452 DiffHunkStatus::deleted(HasSecondaryHunk),
8453 ),
8454 (
8455 1..2,
8456 "two\n",
8457 "TWO\n",
8458 DiffHunkStatus::modified(NoSecondaryHunk),
8459 ),
8460 (
8461 3..4,
8462 "four\n",
8463 "FOUR\n",
8464 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8465 ),
8466 ],
8467 );
8468 });
8469 assert!(matches!(
8470 diff_events.next().await.unwrap(),
8471 BufferDiffEvent::HunksStagedOrUnstaged(_)
8472 ));
8473 let event = diff_events.next().await.unwrap();
8474 if let BufferDiffEvent::DiffChanged(DiffChanged {
8475 changed_range: Some(changed_range),
8476 base_text_changed_range: _,
8477 extended_range: _,
8478 }) = event
8479 {
8480 let changed_range = changed_range.to_point(&snapshot);
8481 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
8482 } else {
8483 panic!("Unexpected event {event:?}");
8484 }
8485
8486 // When the write fails, the hunk returns to being unstaged.
8487 cx.run_until_parked();
8488 uncommitted_diff.update(cx, |diff, cx| {
8489 assert_hunks(
8490 diff.snapshot(cx).hunks(&snapshot),
8491 &snapshot,
8492 &diff.base_text_string(cx).unwrap(),
8493 &[
8494 (
8495 0..0,
8496 "zero\n",
8497 "",
8498 DiffHunkStatus::deleted(HasSecondaryHunk),
8499 ),
8500 (
8501 1..2,
8502 "two\n",
8503 "TWO\n",
8504 DiffHunkStatus::modified(NoSecondaryHunk),
8505 ),
8506 (
8507 3..4,
8508 "four\n",
8509 "FOUR\n",
8510 DiffHunkStatus::modified(HasSecondaryHunk),
8511 ),
8512 ],
8513 );
8514 });
8515
8516 let event = diff_events.next().await.unwrap();
8517 if let BufferDiffEvent::DiffChanged(DiffChanged {
8518 changed_range: Some(changed_range),
8519 base_text_changed_range: _,
8520 extended_range: _,
8521 }) = event
8522 {
8523 let changed_range = changed_range.to_point(&snapshot);
8524 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
8525 } else {
8526 panic!("Unexpected event {event:?}");
8527 }
8528
8529 // Allow writing to the git index to succeed again.
8530 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
8531
8532 // Stage two hunks with separate operations.
8533 uncommitted_diff.update(cx, |diff, cx| {
8534 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
8535 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
8536 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
8537 });
8538
8539 // Both staged hunks appear as pending.
8540 uncommitted_diff.update(cx, |diff, cx| {
8541 assert_hunks(
8542 diff.snapshot(cx).hunks(&snapshot),
8543 &snapshot,
8544 &diff.base_text_string(cx).unwrap(),
8545 &[
8546 (
8547 0..0,
8548 "zero\n",
8549 "",
8550 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
8551 ),
8552 (
8553 1..2,
8554 "two\n",
8555 "TWO\n",
8556 DiffHunkStatus::modified(NoSecondaryHunk),
8557 ),
8558 (
8559 3..4,
8560 "four\n",
8561 "FOUR\n",
8562 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8563 ),
8564 ],
8565 );
8566 });
8567
8568 // Both staging operations take effect.
8569 cx.run_until_parked();
8570 uncommitted_diff.update(cx, |diff, cx| {
8571 assert_hunks(
8572 diff.snapshot(cx).hunks(&snapshot),
8573 &snapshot,
8574 &diff.base_text_string(cx).unwrap(),
8575 &[
8576 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
8577 (
8578 1..2,
8579 "two\n",
8580 "TWO\n",
8581 DiffHunkStatus::modified(NoSecondaryHunk),
8582 ),
8583 (
8584 3..4,
8585 "four\n",
8586 "FOUR\n",
8587 DiffHunkStatus::modified(NoSecondaryHunk),
8588 ),
8589 ],
8590 );
8591 });
8592}
8593
8594#[gpui::test(seeds(340, 472))]
8595async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
8596 use DiffHunkSecondaryStatus::*;
8597 init_test(cx);
8598
8599 let committed_contents = r#"
8600 zero
8601 one
8602 two
8603 three
8604 four
8605 five
8606 "#
8607 .unindent();
8608 let file_contents = r#"
8609 one
8610 TWO
8611 three
8612 FOUR
8613 five
8614 "#
8615 .unindent();
8616
8617 let fs = FakeFs::new(cx.background_executor.clone());
8618 fs.insert_tree(
8619 "/dir",
8620 json!({
8621 ".git": {},
8622 "file.txt": file_contents.clone()
8623 }),
8624 )
8625 .await;
8626
8627 fs.set_head_for_repo(
8628 "/dir/.git".as_ref(),
8629 &[("file.txt", committed_contents.clone())],
8630 "deadbeef",
8631 );
8632 fs.set_index_for_repo(
8633 "/dir/.git".as_ref(),
8634 &[("file.txt", committed_contents.clone())],
8635 );
8636
8637 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8638
8639 let buffer = project
8640 .update(cx, |project, cx| {
8641 project.open_local_buffer("/dir/file.txt", cx)
8642 })
8643 .await
8644 .unwrap();
8645 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8646 let uncommitted_diff = project
8647 .update(cx, |project, cx| {
8648 project.open_uncommitted_diff(buffer.clone(), cx)
8649 })
8650 .await
8651 .unwrap();
8652
8653 // The hunks are initially unstaged.
8654 uncommitted_diff.read_with(cx, |diff, cx| {
8655 assert_hunks(
8656 diff.snapshot(cx).hunks(&snapshot),
8657 &snapshot,
8658 &diff.base_text_string(cx).unwrap(),
8659 &[
8660 (
8661 0..0,
8662 "zero\n",
8663 "",
8664 DiffHunkStatus::deleted(HasSecondaryHunk),
8665 ),
8666 (
8667 1..2,
8668 "two\n",
8669 "TWO\n",
8670 DiffHunkStatus::modified(HasSecondaryHunk),
8671 ),
8672 (
8673 3..4,
8674 "four\n",
8675 "FOUR\n",
8676 DiffHunkStatus::modified(HasSecondaryHunk),
8677 ),
8678 ],
8679 );
8680 });
8681
8682 // Pause IO events
8683 fs.pause_events();
8684
8685 // Stage the first hunk.
8686 uncommitted_diff.update(cx, |diff, cx| {
8687 let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
8688 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
8689 assert_hunks(
8690 diff.snapshot(cx).hunks(&snapshot),
8691 &snapshot,
8692 &diff.base_text_string(cx).unwrap(),
8693 &[
8694 (
8695 0..0,
8696 "zero\n",
8697 "",
8698 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
8699 ),
8700 (
8701 1..2,
8702 "two\n",
8703 "TWO\n",
8704 DiffHunkStatus::modified(HasSecondaryHunk),
8705 ),
8706 (
8707 3..4,
8708 "four\n",
8709 "FOUR\n",
8710 DiffHunkStatus::modified(HasSecondaryHunk),
8711 ),
8712 ],
8713 );
8714 });
8715
8716 // Stage the second hunk *before* receiving the FS event for the first hunk.
8717 cx.run_until_parked();
8718 uncommitted_diff.update(cx, |diff, cx| {
8719 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
8720 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
8721 assert_hunks(
8722 diff.snapshot(cx).hunks(&snapshot),
8723 &snapshot,
8724 &diff.base_text_string(cx).unwrap(),
8725 &[
8726 (
8727 0..0,
8728 "zero\n",
8729 "",
8730 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
8731 ),
8732 (
8733 1..2,
8734 "two\n",
8735 "TWO\n",
8736 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8737 ),
8738 (
8739 3..4,
8740 "four\n",
8741 "FOUR\n",
8742 DiffHunkStatus::modified(HasSecondaryHunk),
8743 ),
8744 ],
8745 );
8746 });
8747
8748 // Process the FS event for staging the first hunk (second event is still pending).
8749 fs.flush_events(1);
8750 cx.run_until_parked();
8751
8752 // Stage the third hunk before receiving the second FS event.
8753 uncommitted_diff.update(cx, |diff, cx| {
8754 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
8755 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
8756 });
8757
8758 // Wait for all remaining IO.
8759 cx.run_until_parked();
8760 fs.flush_events(fs.buffered_event_count());
8761
8762 // Now all hunks are staged.
8763 cx.run_until_parked();
8764 uncommitted_diff.update(cx, |diff, cx| {
8765 assert_hunks(
8766 diff.snapshot(cx).hunks(&snapshot),
8767 &snapshot,
8768 &diff.base_text_string(cx).unwrap(),
8769 &[
8770 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
8771 (
8772 1..2,
8773 "two\n",
8774 "TWO\n",
8775 DiffHunkStatus::modified(NoSecondaryHunk),
8776 ),
8777 (
8778 3..4,
8779 "four\n",
8780 "FOUR\n",
8781 DiffHunkStatus::modified(NoSecondaryHunk),
8782 ),
8783 ],
8784 );
8785 });
8786}
8787
8788#[gpui::test(iterations = 25)]
8789async fn test_staging_random_hunks(
8790 mut rng: StdRng,
8791 _executor: BackgroundExecutor,
8792 cx: &mut gpui::TestAppContext,
8793) {
8794 let operations = env::var("OPERATIONS")
8795 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
8796 .unwrap_or(20);
8797
8798 use DiffHunkSecondaryStatus::*;
8799 init_test(cx);
8800
8801 let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
8802 let index_text = committed_text.clone();
8803 let buffer_text = (0..30)
8804 .map(|i| match i % 5 {
8805 0 => format!("line {i} (modified)\n"),
8806 _ => format!("line {i}\n"),
8807 })
8808 .collect::<String>();
8809
8810 let fs = FakeFs::new(cx.background_executor.clone());
8811 fs.insert_tree(
8812 path!("/dir"),
8813 json!({
8814 ".git": {},
8815 "file.txt": buffer_text.clone()
8816 }),
8817 )
8818 .await;
8819 fs.set_head_for_repo(
8820 path!("/dir/.git").as_ref(),
8821 &[("file.txt", committed_text.clone())],
8822 "deadbeef",
8823 );
8824 fs.set_index_for_repo(
8825 path!("/dir/.git").as_ref(),
8826 &[("file.txt", index_text.clone())],
8827 );
8828 let repo = fs
8829 .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
8830 .unwrap();
8831
8832 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
8833 let buffer = project
8834 .update(cx, |project, cx| {
8835 project.open_local_buffer(path!("/dir/file.txt"), cx)
8836 })
8837 .await
8838 .unwrap();
8839 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8840 let uncommitted_diff = project
8841 .update(cx, |project, cx| {
8842 project.open_uncommitted_diff(buffer.clone(), cx)
8843 })
8844 .await
8845 .unwrap();
8846
8847 let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
8848 diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
8849 });
8850 assert_eq!(hunks.len(), 6);
8851
8852 for _i in 0..operations {
8853 let hunk_ix = rng.random_range(0..hunks.len());
8854 let hunk = &mut hunks[hunk_ix];
8855 let row = hunk.range.start.row;
8856
8857 if hunk.status().has_secondary_hunk() {
8858 log::info!("staging hunk at {row}");
8859 uncommitted_diff.update(cx, |diff, cx| {
8860 diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
8861 });
8862 hunk.secondary_status = SecondaryHunkRemovalPending;
8863 } else {
8864 log::info!("unstaging hunk at {row}");
8865 uncommitted_diff.update(cx, |diff, cx| {
8866 diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
8867 });
8868 hunk.secondary_status = SecondaryHunkAdditionPending;
8869 }
8870
8871 for _ in 0..rng.random_range(0..10) {
8872 log::info!("yielding");
8873 cx.executor().simulate_random_delay().await;
8874 }
8875 }
8876
8877 cx.executor().run_until_parked();
8878
8879 for hunk in &mut hunks {
8880 if hunk.secondary_status == SecondaryHunkRemovalPending {
8881 hunk.secondary_status = NoSecondaryHunk;
8882 } else if hunk.secondary_status == SecondaryHunkAdditionPending {
8883 hunk.secondary_status = HasSecondaryHunk;
8884 }
8885 }
8886
8887 log::info!(
8888 "index text:\n{}",
8889 repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
8890 .await
8891 .unwrap()
8892 );
8893
8894 uncommitted_diff.update(cx, |diff, cx| {
8895 let expected_hunks = hunks
8896 .iter()
8897 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
8898 .collect::<Vec<_>>();
8899 let actual_hunks = diff
8900 .snapshot(cx)
8901 .hunks(&snapshot)
8902 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
8903 .collect::<Vec<_>>();
8904 assert_eq!(actual_hunks, expected_hunks);
8905 });
8906}
8907
8908#[gpui::test]
8909async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8910 init_test(cx);
8911
8912 let committed_contents = r#"
8913 fn main() {
8914 println!("hello from HEAD");
8915 }
8916 "#
8917 .unindent();
8918 let file_contents = r#"
8919 fn main() {
8920 println!("hello from the working copy");
8921 }
8922 "#
8923 .unindent();
8924
8925 let fs = FakeFs::new(cx.background_executor.clone());
8926 fs.insert_tree(
8927 "/dir",
8928 json!({
8929 ".git": {},
8930 "src": {
8931 "main.rs": file_contents,
8932 }
8933 }),
8934 )
8935 .await;
8936
8937 fs.set_head_for_repo(
8938 Path::new("/dir/.git"),
8939 &[("src/main.rs", committed_contents.clone())],
8940 "deadbeef",
8941 );
8942 fs.set_index_for_repo(
8943 Path::new("/dir/.git"),
8944 &[("src/main.rs", committed_contents.clone())],
8945 );
8946
8947 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8948
8949 let buffer = project
8950 .update(cx, |project, cx| {
8951 project.open_local_buffer("/dir/src/main.rs", cx)
8952 })
8953 .await
8954 .unwrap();
8955 let uncommitted_diff = project
8956 .update(cx, |project, cx| {
8957 project.open_uncommitted_diff(buffer.clone(), cx)
8958 })
8959 .await
8960 .unwrap();
8961
8962 cx.run_until_parked();
8963 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8964 let snapshot = buffer.read(cx).snapshot();
8965 assert_hunks(
8966 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8967 &snapshot,
8968 &uncommitted_diff.base_text_string(cx).unwrap(),
8969 &[(
8970 1..2,
8971 " println!(\"hello from HEAD\");\n",
8972 " println!(\"hello from the working copy\");\n",
8973 DiffHunkStatus {
8974 kind: DiffHunkStatusKind::Modified,
8975 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8976 },
8977 )],
8978 );
8979 });
8980}
8981
8982// TODO: Should we test this on Windows also?
8983#[gpui::test]
8984#[cfg(not(windows))]
8985async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
8986 use std::os::unix::fs::PermissionsExt;
8987 init_test(cx);
8988 cx.executor().allow_parking();
8989 let committed_contents = "bar\n";
8990 let file_contents = "baz\n";
8991 let root = TempTree::new(json!({
8992 "project": {
8993 "foo": committed_contents
8994 },
8995 }));
8996
8997 let work_dir = root.path().join("project");
8998 let file_path = work_dir.join("foo");
8999 let repo = git_init(work_dir.as_path());
9000 let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
9001 perms.set_mode(0o755);
9002 std::fs::set_permissions(&file_path, perms).unwrap();
9003 git_add("foo", &repo);
9004 git_commit("Initial commit", &repo);
9005 std::fs::write(&file_path, file_contents).unwrap();
9006
9007 let project = Project::test(
9008 Arc::new(RealFs::new(None, cx.executor())),
9009 [root.path()],
9010 cx,
9011 )
9012 .await;
9013
9014 let buffer = project
9015 .update(cx, |project, cx| {
9016 project.open_local_buffer(file_path.as_path(), cx)
9017 })
9018 .await
9019 .unwrap();
9020
9021 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
9022
9023 let uncommitted_diff = project
9024 .update(cx, |project, cx| {
9025 project.open_uncommitted_diff(buffer.clone(), cx)
9026 })
9027 .await
9028 .unwrap();
9029
9030 uncommitted_diff.update(cx, |diff, cx| {
9031 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
9032 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
9033 });
9034
9035 cx.run_until_parked();
9036
9037 let output = smol::process::Command::new("git")
9038 .current_dir(&work_dir)
9039 .args(["diff", "--staged"])
9040 .output()
9041 .await
9042 .unwrap();
9043
9044 let staged_diff = String::from_utf8_lossy(&output.stdout);
9045
9046 assert!(
9047 !staged_diff.contains("new mode 100644"),
9048 "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
9049 staged_diff
9050 );
9051
9052 let output = smol::process::Command::new("git")
9053 .current_dir(&work_dir)
9054 .args(["ls-files", "-s"])
9055 .output()
9056 .await
9057 .unwrap();
9058 let index_contents = String::from_utf8_lossy(&output.stdout);
9059
9060 assert!(
9061 index_contents.contains("100755"),
9062 "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
9063 index_contents
9064 );
9065}
9066
9067#[gpui::test]
9068async fn test_repository_and_path_for_project_path(
9069 background_executor: BackgroundExecutor,
9070 cx: &mut gpui::TestAppContext,
9071) {
9072 init_test(cx);
9073 let fs = FakeFs::new(background_executor);
9074 fs.insert_tree(
9075 path!("/root"),
9076 json!({
9077 "c.txt": "",
9078 "dir1": {
9079 ".git": {},
9080 "deps": {
9081 "dep1": {
9082 ".git": {},
9083 "src": {
9084 "a.txt": ""
9085 }
9086 }
9087 },
9088 "src": {
9089 "b.txt": ""
9090 }
9091 },
9092 }),
9093 )
9094 .await;
9095
9096 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9097 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9098 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9099 project
9100 .update(cx, |project, cx| project.git_scans_complete(cx))
9101 .await;
9102 cx.run_until_parked();
9103
9104 project.read_with(cx, |project, cx| {
9105 let git_store = project.git_store().read(cx);
9106 let pairs = [
9107 ("c.txt", None),
9108 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9109 (
9110 "dir1/deps/dep1/src/a.txt",
9111 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9112 ),
9113 ];
9114 let expected = pairs
9115 .iter()
9116 .map(|(path, result)| {
9117 (
9118 path,
9119 result.map(|(repo, repo_path)| {
9120 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9121 }),
9122 )
9123 })
9124 .collect::<Vec<_>>();
9125 let actual = pairs
9126 .iter()
9127 .map(|(path, _)| {
9128 let project_path = (tree_id, rel_path(path)).into();
9129 let result = maybe!({
9130 let (repo, repo_path) =
9131 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9132 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9133 });
9134 (path, result)
9135 })
9136 .collect::<Vec<_>>();
9137 pretty_assertions::assert_eq!(expected, actual);
9138 });
9139
9140 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9141 .await
9142 .unwrap();
9143 cx.run_until_parked();
9144
9145 project.read_with(cx, |project, cx| {
9146 let git_store = project.git_store().read(cx);
9147 assert_eq!(
9148 git_store.repository_and_path_for_project_path(
9149 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9150 cx
9151 ),
9152 None
9153 );
9154 });
9155}
9156
9157#[gpui::test]
9158async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9159 init_test(cx);
9160 let fs = FakeFs::new(cx.background_executor.clone());
9161 let home = paths::home_dir();
9162 fs.insert_tree(
9163 home,
9164 json!({
9165 ".git": {},
9166 "project": {
9167 "a.txt": "A"
9168 },
9169 }),
9170 )
9171 .await;
9172
9173 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9174 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9175 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9176
9177 project
9178 .update(cx, |project, cx| project.git_scans_complete(cx))
9179 .await;
9180 tree.flush_fs_events(cx).await;
9181
9182 project.read_with(cx, |project, cx| {
9183 let containing = project
9184 .git_store()
9185 .read(cx)
9186 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9187 assert!(containing.is_none());
9188 });
9189
9190 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9191 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9192 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9193 project
9194 .update(cx, |project, cx| project.git_scans_complete(cx))
9195 .await;
9196 tree.flush_fs_events(cx).await;
9197
9198 project.read_with(cx, |project, cx| {
9199 let containing = project
9200 .git_store()
9201 .read(cx)
9202 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9203 assert_eq!(
9204 containing
9205 .unwrap()
9206 .0
9207 .read(cx)
9208 .work_directory_abs_path
9209 .as_ref(),
9210 home,
9211 );
9212 });
9213}
9214
// End-to-end check of cached git status reporting against a real on-disk
// repository: verifies the statuses (and diff stats) observed at startup, and
// that they track subsequent modifications, commits, and deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git operations require parking.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a worktree deletion (d.txt) and a worktree modification (a.txt).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged, so it does not appear in the status list.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify the previously-unchanged file; its status should now show up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit all outstanding changes (including removing d.txt from the index),
    // then delete one tracked file and one untracked file from the worktree.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9370
// Checks post-processing of raw git statuses:
// - a file deleted from the index but still present on disk is reported as a
//   combined "DA" (index: Deleted, worktree: Added) status, and
// - a nested git repository's work directory does not appear in the outer
//   repository's status entries.
// NOTE(review): currently `#[ignore]`d — presumably flaky or broken; confirm
// before relying on it.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the *outer* repository (work dir ending in "project"), not `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9435
9436#[track_caller]
9437/// We merge lhs into rhs.
9438fn merge_pending_ops_snapshots(
9439 source: Vec<pending_op::PendingOps>,
9440 mut target: Vec<pending_op::PendingOps>,
9441) -> Vec<pending_op::PendingOps> {
9442 for s_ops in source {
9443 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9444 if ops.repo_path == s_ops.repo_path {
9445 Some(idx)
9446 } else {
9447 None
9448 }
9449 }) {
9450 let t_ops = &mut target[idx];
9451 for s_op in s_ops.ops {
9452 if let Some(op_idx) = t_ops
9453 .ops
9454 .iter()
9455 .zip(0..)
9456 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9457 {
9458 let t_op = &mut t_ops.ops[op_idx];
9459 match (s_op.job_status, t_op.job_status) {
9460 (pending_op::JobStatus::Running, _) => {}
9461 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9462 (s_st, t_st) if s_st == t_st => {}
9463 _ => unreachable!(),
9464 }
9465 } else {
9466 t_ops.ops.push(s_op);
9467 }
9468 }
9469 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9470 } else {
9471 target.push(s_ops);
9472 }
9473 }
9474 target
9475}
9476
// Verifies that alternating stage/unstage operations on a single untracked
// file each produce a pending op that transitions Running -> Finished, with
// monotonically increasing op ids, and that the final cached status reflects
// the file as staged (index: Added).
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so we
    // can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; incremented after each operation.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op is Running while the task
    // is in flight and Finished once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // All five operations should be recorded, in id order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation staged the file, so it should now be index-Added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9641
// Verifies that when the same path is staged twice in quick succession, the
// first (superseded) op is marked Skipped and only the second op Finishes.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Kick off a first stage operation without awaiting it...
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // ...then immediately issue a second one for the same path and await it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded by op 2 and should be Skipped, not Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file should end up staged (index: Added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9751
// Verifies that stage_all/unstage_all record per-path pending ops (one shared
// op id per bulk operation) for every affected file, and that the final
// unstage_all leaves both files untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // NOTE(review): a.txt shows only two ops (ids 1 and 2) — the stage_all
    // presumably skipped it because it was already staged; confirm against the
    // stage_all implementation.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
9882
// Verifies that when the opened worktree is a subfolder deep inside a git
// repository, the repository rooted *above* the worktree is still discovered
// and its statuses are reported for paths within the worktree.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the deep subfolder, not the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the repo root, above the worktree.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses in the fake repo; the cached statuses should follow.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9962
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` never matches, so this test is currently compiled out.
// It exercises merge-conflict tracking across a conflicted cherry-pick and its
// manual resolution.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits of a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is in a conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, the conflict list should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10045
// Verifies that rewriting .gitignore re-evaluates which entries are ignored,
// and that a newly un-ignored file can then show up as staged (Added).
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed, b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has flipped: a.xml is now ignored, b.txt is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10113
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving the cached per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should track the new location, with statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10195
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
10201#[gpui::test]
10202#[cfg_attr(target_os = "windows", ignore)]
10203async fn test_file_status(cx: &mut gpui::TestAppContext) {
10204 init_test(cx);
10205 cx.executor().allow_parking();
10206 const IGNORE_RULE: &str = "**/target";
10207
10208 let root = TempTree::new(json!({
10209 "project": {
10210 "a.txt": "a",
10211 "b.txt": "bb",
10212 "c": {
10213 "d": {
10214 "e.txt": "eee"
10215 }
10216 },
10217 "f.txt": "ffff",
10218 "target": {
10219 "build_file": "???"
10220 },
10221 ".gitignore": IGNORE_RULE
10222 },
10223
10224 }));
10225 let root_path = root.path();
10226
10227 const A_TXT: &str = "a.txt";
10228 const B_TXT: &str = "b.txt";
10229 const E_TXT: &str = "c/d/e.txt";
10230 const F_TXT: &str = "f.txt";
10231 const DOTGITIGNORE: &str = ".gitignore";
10232 const BUILD_FILE: &str = "target/build_file";
10233
10234 // Set up git repository before creating the worktree.
10235 let work_dir = root.path().join("project");
10236 let mut repo = git_init(work_dir.as_path());
10237 repo.add_ignore_rule(IGNORE_RULE).unwrap();
10238 git_add(A_TXT, &repo);
10239 git_add(E_TXT, &repo);
10240 git_add(DOTGITIGNORE, &repo);
10241 git_commit("Initial commit", &repo);
10242
10243 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
10244
10245 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10246 tree.flush_fs_events(cx).await;
10247 project
10248 .update(cx, |project, cx| project.git_scans_complete(cx))
10249 .await;
10250 cx.executor().run_until_parked();
10251
10252 let repository = project.read_with(cx, |project, cx| {
10253 project.repositories(cx).values().next().unwrap().clone()
10254 });
10255
10256 // Check that the right git state is observed on startup
10257 repository.read_with(cx, |repository, _cx| {
10258 assert_eq!(
10259 repository.work_directory_abs_path.as_ref(),
10260 root_path.join("project").as_path()
10261 );
10262
10263 assert_eq!(
10264 repository
10265 .status_for_path(&repo_path(B_TXT))
10266 .unwrap()
10267 .status,
10268 FileStatus::Untracked,
10269 );
10270 assert_eq!(
10271 repository
10272 .status_for_path(&repo_path(F_TXT))
10273 .unwrap()
10274 .status,
10275 FileStatus::Untracked,
10276 );
10277 });
10278
10279 // Modify a file in the working copy.
10280 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
10281 tree.flush_fs_events(cx).await;
10282 project
10283 .update(cx, |project, cx| project.git_scans_complete(cx))
10284 .await;
10285 cx.executor().run_until_parked();
10286
10287 // The worktree detects that the file's git status has changed.
10288 repository.read_with(cx, |repository, _| {
10289 assert_eq!(
10290 repository
10291 .status_for_path(&repo_path(A_TXT))
10292 .unwrap()
10293 .status,
10294 StatusCode::Modified.worktree(),
10295 );
10296 });
10297
10298 // Create a commit in the git repository.
10299 git_add(A_TXT, &repo);
10300 git_add(B_TXT, &repo);
10301 git_commit("Committing modified and added", &repo);
10302 tree.flush_fs_events(cx).await;
10303 project
10304 .update(cx, |project, cx| project.git_scans_complete(cx))
10305 .await;
10306 cx.executor().run_until_parked();
10307
10308 // The worktree detects that the files' git status have changed.
10309 repository.read_with(cx, |repository, _cx| {
10310 assert_eq!(
10311 repository
10312 .status_for_path(&repo_path(F_TXT))
10313 .unwrap()
10314 .status,
10315 FileStatus::Untracked,
10316 );
10317 assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
10318 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10319 });
10320
10321 // Modify files in the working copy and perform git operations on other files.
10322 git_reset(0, &repo);
10323 git_remove_index(Path::new(B_TXT), &repo);
10324 git_stash(&mut repo);
10325 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
10326 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
10327 tree.flush_fs_events(cx).await;
10328 project
10329 .update(cx, |project, cx| project.git_scans_complete(cx))
10330 .await;
10331 cx.executor().run_until_parked();
10332
10333 // Check that more complex repo changes are tracked
10334 repository.read_with(cx, |repository, _cx| {
10335 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
10336 assert_eq!(
10337 repository
10338 .status_for_path(&repo_path(B_TXT))
10339 .unwrap()
10340 .status,
10341 FileStatus::Untracked,
10342 );
10343 assert_eq!(
10344 repository
10345 .status_for_path(&repo_path(E_TXT))
10346 .unwrap()
10347 .status,
10348 StatusCode::Modified.worktree(),
10349 );
10350 });
10351
10352 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
10353 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
10354 std::fs::write(
10355 work_dir.join(DOTGITIGNORE),
10356 [IGNORE_RULE, "f.txt"].join("\n"),
10357 )
10358 .unwrap();
10359
10360 git_add(Path::new(DOTGITIGNORE), &repo);
10361 git_commit("Committing modified git ignore", &repo);
10362
10363 tree.flush_fs_events(cx).await;
10364 cx.executor().run_until_parked();
10365
10366 let mut renamed_dir_name = "first_directory/second_directory";
10367 const RENAMED_FILE: &str = "rf.txt";
10368
10369 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
10370 std::fs::write(
10371 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
10372 "new-contents",
10373 )
10374 .unwrap();
10375
10376 tree.flush_fs_events(cx).await;
10377 project
10378 .update(cx, |project, cx| project.git_scans_complete(cx))
10379 .await;
10380 cx.executor().run_until_parked();
10381
10382 repository.read_with(cx, |repository, _cx| {
10383 assert_eq!(
10384 repository
10385 .status_for_path(&RepoPath::from_rel_path(
10386 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10387 ))
10388 .unwrap()
10389 .status,
10390 FileStatus::Untracked,
10391 );
10392 });
10393
10394 renamed_dir_name = "new_first_directory/second_directory";
10395
10396 std::fs::rename(
10397 work_dir.join("first_directory"),
10398 work_dir.join("new_first_directory"),
10399 )
10400 .unwrap();
10401
10402 tree.flush_fs_events(cx).await;
10403 project
10404 .update(cx, |project, cx| project.git_scans_complete(cx))
10405 .await;
10406 cx.executor().run_until_parked();
10407
10408 repository.read_with(cx, |repository, _cx| {
10409 assert_eq!(
10410 repository
10411 .status_for_path(&RepoPath::from_rel_path(
10412 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
10413 ))
10414 .unwrap()
10415 .status,
10416 FileStatus::Untracked,
10417 );
10418 });
10419}
10420
// Regression test: file churn inside a git-ignored directory (e.g. `target/`)
// must not produce spurious RepositoryUpdated events, while entries directly
// under an already-loaded ignored dir still surface worktree entry events.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS and a real git repo are used below, so allow blocking I/O.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository-update and worktree entry-change events for assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel file is test-harness noise, not part of the scenario.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file under the ignored dir forces those entries to be scanned in.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a build tool: create a nested dir under the ignored tree, write a
    // temp file into it, then delete the whole thing again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // After all the churn, the worktree should look exactly as it did initially.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10579
10580// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10581// to different timings/ordering of events.
10582#[ignore]
10583#[gpui::test]
10584async fn test_odd_events_for_ignored_dirs(
10585 executor: BackgroundExecutor,
10586 cx: &mut gpui::TestAppContext,
10587) {
10588 init_test(cx);
10589 let fs = FakeFs::new(executor);
10590 fs.insert_tree(
10591 path!("/root"),
10592 json!({
10593 ".git": {},
10594 ".gitignore": "**/target/",
10595 "src": {
10596 "main.rs": "fn main() {}",
10597 },
10598 "target": {
10599 "debug": {
10600 "foo.txt": "foo",
10601 "deps": {}
10602 }
10603 }
10604 }),
10605 )
10606 .await;
10607 fs.set_head_and_index_for_repo(
10608 path!("/root/.git").as_ref(),
10609 &[
10610 (".gitignore", "**/target/".into()),
10611 ("src/main.rs", "fn main() {}".into()),
10612 ],
10613 );
10614
10615 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10616 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10617 let project_events = Arc::new(Mutex::new(Vec::new()));
10618 project.update(cx, |project, cx| {
10619 let repository_updates = repository_updates.clone();
10620 cx.subscribe(project.git_store(), move |_, _, e, _| {
10621 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10622 repository_updates.lock().push(e.clone());
10623 }
10624 })
10625 .detach();
10626 let project_events = project_events.clone();
10627 cx.subscribe_self(move |_, e, _| {
10628 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10629 project_events.lock().extend(
10630 updates
10631 .iter()
10632 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10633 .filter(|(path, _)| path != "fs-event-sentinel"),
10634 );
10635 }
10636 })
10637 .detach();
10638 });
10639
10640 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10641 tree.update(cx, |tree, cx| {
10642 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10643 })
10644 .await
10645 .unwrap();
10646 tree.flush_fs_events(cx).await;
10647 project
10648 .update(cx, |project, cx| project.git_scans_complete(cx))
10649 .await;
10650 cx.run_until_parked();
10651 tree.update(cx, |tree, _| {
10652 assert_eq!(
10653 tree.entries(true, 0)
10654 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10655 .collect::<Vec<_>>(),
10656 vec![
10657 (rel_path(""), false),
10658 (rel_path(".gitignore"), false),
10659 (rel_path("src"), false),
10660 (rel_path("src/main.rs"), false),
10661 (rel_path("target"), true),
10662 (rel_path("target/debug"), true),
10663 (rel_path("target/debug/deps"), true),
10664 (rel_path("target/debug/foo.txt"), true),
10665 ]
10666 );
10667 });
10668
10669 assert_eq!(
10670 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10671 vec![
10672 RepositoryEvent::BranchChanged,
10673 RepositoryEvent::StatusesChanged,
10674 RepositoryEvent::StatusesChanged,
10675 ],
10676 "Initial worktree scan should produce a repo update event"
10677 );
10678 assert_eq!(
10679 project_events.lock().drain(..).collect::<Vec<_>>(),
10680 vec![
10681 ("target".to_string(), PathChange::Loaded),
10682 ("target/debug".to_string(), PathChange::Loaded),
10683 ("target/debug/deps".to_string(), PathChange::Loaded),
10684 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10685 ],
10686 "All non-ignored entries and all opened firs should be getting a project event",
10687 );
10688
10689 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10690 // This may happen multiple times during a single flycheck, but once is enough for testing.
10691 fs.emit_fs_event("/root/target/debug/deps", None);
10692 tree.flush_fs_events(cx).await;
10693 project
10694 .update(cx, |project, cx| project.git_scans_complete(cx))
10695 .await;
10696 cx.executor().run_until_parked();
10697
10698 assert_eq!(
10699 repository_updates
10700 .lock()
10701 .iter()
10702 .cloned()
10703 .collect::<Vec<_>>(),
10704 Vec::new(),
10705 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10706 );
10707 assert_eq!(
10708 project_events.lock().as_slice(),
10709 Vec::new(),
10710 "No further project events should happen, as only ignored dirs received FS events",
10711 );
10712}
10713
// Repositories should be registered only for visible worktrees: adding an
// invisible (single-file) worktree that lives inside an outer repository must
// not cause that outer repository to appear in `Project::repositories`.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the visible worktree's repo (dep1) is present initially.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create a non-visible worktree for a file that belongs to the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repository (/root/dir1) must still not be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10775
// Verifies that after a rescan, git status and is-ignored state are correct for
// tracked files, files ignored by an ancestor .gitignore, and files inside an
// ignored directory — both for the initial tree and for newly created files.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so normally-excluded entries (like `.git`)
    // remain visible to the scanner and can be asserted on below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so their state can be asserted.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new staged file, a new ancestor-ignored file, and a new file
    // inside the ignored dir, then let the rescan pick them all up.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` dir itself should always be marked ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10916
// Verifies that linked git worktrees (a `.git` *file* with a `gitdir:` pointer
// into `.git/worktrees/...`) and submodules (`.git/modules/...`) are each
// detected as distinct repositories, and that git state changes inside them
// are observed and reflected in path statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    // Sort for a deterministic comparison; repository iteration order is unspecified.
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer should resolve to the linked worktree's repository, not the root repo.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // The barrier resolves once the repository has processed pending updates.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11072
// Two project roots that live inside the same git repository should resolve to
// a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository — the shared parent — should be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11119
// Saving a buffer under a new path (`save_buffer_as`) should emit
// `BufferChangedFilePath`, which must re-resolve the buffer's unstaged and
// uncommitted diff bases against the *new* path's staged/committed contents.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Four distinct base contents so staged vs committed bases are distinguishable.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the buffer's entire contents so a diff against any base exists.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11233
11234async fn search(
11235 project: &Entity<Project>,
11236 query: SearchQuery,
11237 cx: &mut gpui::TestAppContext,
11238) -> Result<HashMap<String, Vec<Range<usize>>>> {
11239 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11240 let mut results = HashMap::default();
11241 while let Ok(search_result) = search_rx.rx.recv().await {
11242 match search_result {
11243 SearchResult::Buffer { buffer, ranges } => {
11244 results.entry(buffer).or_insert(ranges);
11245 }
11246 SearchResult::LimitReached => {}
11247 }
11248 }
11249 Ok(results
11250 .into_iter()
11251 .map(|(buffer, ranges)| {
11252 buffer.update(cx, |buffer, cx| {
11253 let path = buffer
11254 .file()
11255 .unwrap()
11256 .full_path(cx)
11257 .to_string_lossy()
11258 .to_string();
11259 let ranges = ranges
11260 .into_iter()
11261 .map(|range| range.to_offset(buffer))
11262 .collect::<Vec<_>>();
11263 (path, ranges)
11264 })
11265 })
11266 .collect())
11267}
11268
// Reloading a buffer with a different encoding reinterprets the on-disk bytes;
// undo/redo must restore both the text and the encoding, and the buffer must
// stay clean (not dirty) throughout since contents always come from disk.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // Bytes 0x48 0x69 form the single little-endian UTF-16 code unit U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo restores the UTF-16LE interpretation again.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11332
11333pub fn init_test(cx: &mut gpui::TestAppContext) {
11334 zlog::init_test();
11335
11336 cx.update(|cx| {
11337 let settings_store = SettingsStore::test(cx);
11338 cx.set_global(settings_store);
11339 release_channel::init(semver::Version::new(0, 0, 0), cx);
11340 });
11341}
11342
11343fn json_lang() -> Arc<Language> {
11344 Arc::new(Language::new(
11345 LanguageConfig {
11346 name: "JSON".into(),
11347 matcher: LanguageMatcher {
11348 path_suffixes: vec!["json".to_string()],
11349 ..Default::default()
11350 },
11351 ..Default::default()
11352 },
11353 None,
11354 ))
11355}
11356
11357fn js_lang() -> Arc<Language> {
11358 Arc::new(Language::new(
11359 LanguageConfig {
11360 name: "JavaScript".into(),
11361 matcher: LanguageMatcher {
11362 path_suffixes: vec!["js".to_string()],
11363 ..Default::default()
11364 },
11365 ..Default::default()
11366 },
11367 None,
11368 ))
11369}
11370
11371fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
11372 struct PythonMootToolchainLister(Arc<FakeFs>);
11373 #[async_trait]
11374 impl ToolchainLister for PythonMootToolchainLister {
11375 async fn list(
11376 &self,
11377 worktree_root: PathBuf,
11378 subroot_relative_path: Arc<RelPath>,
11379 _: Option<HashMap<String, String>>,
11380 _: &dyn Fs,
11381 ) -> ToolchainList {
11382 // This lister will always return a path .venv directories within ancestors
11383 let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
11384 let mut toolchains = vec![];
11385 for ancestor in ancestors {
11386 let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
11387 if self.0.is_dir(&venv_path).await {
11388 toolchains.push(Toolchain {
11389 name: SharedString::new_static("Python Venv"),
11390 path: venv_path.to_string_lossy().into_owned().into(),
11391 language_name: LanguageName(SharedString::new_static("Python")),
11392 as_json: serde_json::Value::Null,
11393 })
11394 }
11395 }
11396 ToolchainList {
11397 toolchains,
11398 ..Default::default()
11399 }
11400 }
11401 async fn resolve(
11402 &self,
11403 _: PathBuf,
11404 _: Option<HashMap<String, String>>,
11405 _: &dyn Fs,
11406 ) -> anyhow::Result<Toolchain> {
11407 Err(anyhow::anyhow!("Not implemented"))
11408 }
11409 fn meta(&self) -> ToolchainMetadata {
11410 ToolchainMetadata {
11411 term: SharedString::new_static("Virtual Environment"),
11412 new_toolchain_placeholder: SharedString::new_static(
11413 "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
11414 ),
11415 manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
11416 }
11417 }
11418 fn activation_script(
11419 &self,
11420 _: &Toolchain,
11421 _: ShellKind,
11422 _: &gpui::App,
11423 ) -> futures::future::BoxFuture<'static, Vec<String>> {
11424 Box::pin(async { vec![] })
11425 }
11426 }
11427 Arc::new(
11428 Language::new(
11429 LanguageConfig {
11430 name: "Python".into(),
11431 matcher: LanguageMatcher {
11432 path_suffixes: vec!["py".to_string()],
11433 ..Default::default()
11434 },
11435 ..Default::default()
11436 },
11437 None, // We're not testing Python parsing with this language.
11438 )
11439 .with_manifest(Some(ManifestName::from(SharedString::new_static(
11440 "pyproject.toml",
11441 ))))
11442 .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
11443 )
11444}
11445
11446fn typescript_lang() -> Arc<Language> {
11447 Arc::new(Language::new(
11448 LanguageConfig {
11449 name: "TypeScript".into(),
11450 matcher: LanguageMatcher {
11451 path_suffixes: vec!["ts".to_string()],
11452 ..Default::default()
11453 },
11454 ..Default::default()
11455 },
11456 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11457 ))
11458}
11459
11460fn tsx_lang() -> Arc<Language> {
11461 Arc::new(Language::new(
11462 LanguageConfig {
11463 name: "tsx".into(),
11464 matcher: LanguageMatcher {
11465 path_suffixes: vec!["tsx".to_string()],
11466 ..Default::default()
11467 },
11468 ..Default::default()
11469 },
11470 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11471 ))
11472}
11473
11474fn get_all_tasks(
11475 project: &Entity<Project>,
11476 task_contexts: Arc<TaskContexts>,
11477 cx: &mut App,
11478) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11479 let new_tasks = project.update(cx, |project, cx| {
11480 project.task_store().update(cx, |task_store, cx| {
11481 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11482 this.used_and_current_resolved_tasks(task_contexts, cx)
11483 })
11484 })
11485 });
11486
11487 cx.background_spawn(async move {
11488 let (mut old, new) = new_tasks.await;
11489 old.extend(new);
11490 old
11491 })
11492}
11493
11494#[track_caller]
11495fn assert_entry_git_state(
11496 tree: &Worktree,
11497 repository: &Repository,
11498 path: &str,
11499 index_status: Option<StatusCode>,
11500 is_ignored: bool,
11501) {
11502 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11503 let entry = tree
11504 .entry_for_path(&rel_path(path))
11505 .unwrap_or_else(|| panic!("entry {path} not found"));
11506 let status = repository
11507 .status_for_path(&repo_path(path))
11508 .map(|entry| entry.status);
11509 let expected = index_status.map(|index_status| {
11510 TrackedStatus {
11511 index_status,
11512 worktree_status: StatusCode::Unmodified,
11513 }
11514 .into()
11515 });
11516 assert_eq!(
11517 status, expected,
11518 "expected {path} to have git status: {expected:?}"
11519 );
11520 assert_eq!(
11521 entry.is_ignored, is_ignored,
11522 "expected {path} to have is_ignored: {is_ignored}"
11523 );
11524}
11525
11526#[track_caller]
11527fn git_init(path: &Path) -> git2::Repository {
11528 let mut init_opts = RepositoryInitOptions::new();
11529 init_opts.initial_head("main");
11530 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11531}
11532
11533#[track_caller]
11534fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11535 let path = path.as_ref();
11536 let mut index = repo.index().expect("Failed to get index");
11537 index.add_path(path).expect("Failed to add file");
11538 index.write().expect("Failed to write index");
11539}
11540
11541#[track_caller]
11542fn git_remove_index(path: &Path, repo: &git2::Repository) {
11543 let mut index = repo.index().expect("Failed to get index");
11544 index.remove_path(path).expect("Failed to add file");
11545 index.write().expect("Failed to write index");
11546}
11547
11548#[track_caller]
11549fn git_commit(msg: &'static str, repo: &git2::Repository) {
11550 use git2::Signature;
11551
11552 let signature = Signature::now("test", "test@zed.dev").unwrap();
11553 let oid = repo.index().unwrap().write_tree().unwrap();
11554 let tree = repo.find_tree(oid).unwrap();
11555 if let Ok(head) = repo.head() {
11556 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11557
11558 let parent_commit = parent_obj.as_commit().unwrap();
11559
11560 repo.commit(
11561 Some("HEAD"),
11562 &signature,
11563 &signature,
11564 msg,
11565 &tree,
11566 &[parent_commit],
11567 )
11568 .expect("Failed to commit with parent");
11569 } else {
11570 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11571 .expect("Failed to commit");
11572 }
11573}
11574
/// Cherry-picks `commit` onto the current HEAD with default options.
/// (Currently compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
11580
11581#[track_caller]
11582fn git_stash(repo: &mut git2::Repository) {
11583 use git2::Signature;
11584
11585 let signature = Signature::now("test", "test@zed.dev").unwrap();
11586 repo.stash_save(&signature, "N/A", None)
11587 .expect("Failed to stash");
11588}
11589
11590#[track_caller]
11591fn git_reset(offset: usize, repo: &git2::Repository) {
11592 let head = repo.head().expect("Couldn't get repo head");
11593 let object = head.peel(git2::ObjectType::Commit).unwrap();
11594 let commit = object.as_commit().unwrap();
11595 let new_head = commit
11596 .parents()
11597 .inspect(|parnet| {
11598 parnet.message();
11599 })
11600 .nth(offset)
11601 .expect("Not enough history");
11602 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11603 .expect("Could not reset");
11604}
11605
/// Creates a branch named `name` pointing at the current HEAD commit,
/// without forcing over an existing branch.
/// (Currently compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Previously this expect said "Failed to commit" — a copy-paste from
    // `git_commit` that produced a misleading panic message here.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11616
/// Points HEAD at the reference `name` and checks it out into the working
/// directory with default options.
/// (Currently compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None)
        .expect("Failed to check out head");
}
11623
/// Collects the repository's file statuses into a map keyed by path.
/// (Currently compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
11633
/// Verifies that `Project::find_project_path` resolves absolute paths to the
/// correct worktree and worktree-relative path, including paths to files that
/// don't exist yet (but fall inside a worktree) and paths outside all
/// worktrees.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees rooted at project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // NOTE(review): this assumes `worktrees(cx)` yields the worktrees in the
    // order they were passed to `Project::test` — confirm if that ordering is
    // guaranteed.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a multi-component relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent file still resolves as long as it's inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11717
/// Verifies that removing worktrees drops the corresponding git repositories
/// from the `GitStore` and that the active repository falls back to a
/// remaining one (and finally to `None`) as worktrees are removed.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees; note that /root/b/script sits inside repo /root/b.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories: /root/b/script shares /root/b's repo.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested /root/b/script worktree must not drop the /root/b
    // repository, which is still covered by the /root/b worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that owns the active repository should make the
    // remaining repository (/root/b) active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree gone there should be no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11830
/// Verifies the optimistic-staging lifecycle of a diff hunk: it starts
/// unstaged (`HasSecondaryHunk`), transitions to `SecondaryHunkRemovalPending`
/// while a stage operation is in flight, settles at `NoSecondaryHunk` once
/// staging completes, and disappears entirely after HEAD is updated to match
/// the file (simulating a commit).
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // ("two" -> "TWO") is an unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // The pending state hasn't been applied yet; keep ticking.
            HasSecondaryHunk => {}
            // The optimistic "removal pending" state was observed — stop.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11975
11976#[gpui::test]
11977async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11978 init_test(cx);
11979
11980 // Configure read_only_files setting
11981 cx.update(|cx| {
11982 cx.update_global::<SettingsStore, _>(|store, cx| {
11983 store.update_user_settings(cx, |settings| {
11984 settings.project.worktree.read_only_files = Some(vec![
11985 "**/generated/**".to_string(),
11986 "**/*.gen.rs".to_string(),
11987 ]);
11988 });
11989 });
11990 });
11991
11992 let fs = FakeFs::new(cx.background_executor.clone());
11993 fs.insert_tree(
11994 path!("/root"),
11995 json!({
11996 "src": {
11997 "main.rs": "fn main() {}",
11998 "types.gen.rs": "// Generated file",
11999 },
12000 "generated": {
12001 "schema.rs": "// Auto-generated schema",
12002 }
12003 }),
12004 )
12005 .await;
12006
12007 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12008
12009 // Open a regular file - should be read-write
12010 let regular_buffer = project
12011 .update(cx, |project, cx| {
12012 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12013 })
12014 .await
12015 .unwrap();
12016
12017 regular_buffer.read_with(cx, |buffer, _| {
12018 assert!(!buffer.read_only(), "Regular file should not be read-only");
12019 });
12020
12021 // Open a file matching *.gen.rs pattern - should be read-only
12022 let gen_buffer = project
12023 .update(cx, |project, cx| {
12024 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12025 })
12026 .await
12027 .unwrap();
12028
12029 gen_buffer.read_with(cx, |buffer, _| {
12030 assert!(
12031 buffer.read_only(),
12032 "File matching *.gen.rs pattern should be read-only"
12033 );
12034 });
12035
12036 // Open a file in generated directory - should be read-only
12037 let generated_buffer = project
12038 .update(cx, |project, cx| {
12039 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12040 })
12041 .await
12042 .unwrap();
12043
12044 generated_buffer.read_with(cx, |buffer, _| {
12045 assert!(
12046 buffer.read_only(),
12047 "File in generated directory should be read-only"
12048 );
12049 });
12050}
12051
12052#[gpui::test]
12053async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12054 init_test(cx);
12055
12056 // Explicitly set read_only_files to empty (default behavior)
12057 cx.update(|cx| {
12058 cx.update_global::<SettingsStore, _>(|store, cx| {
12059 store.update_user_settings(cx, |settings| {
12060 settings.project.worktree.read_only_files = Some(vec![]);
12061 });
12062 });
12063 });
12064
12065 let fs = FakeFs::new(cx.background_executor.clone());
12066 fs.insert_tree(
12067 path!("/root"),
12068 json!({
12069 "src": {
12070 "main.rs": "fn main() {}",
12071 },
12072 "generated": {
12073 "schema.rs": "// Auto-generated schema",
12074 }
12075 }),
12076 )
12077 .await;
12078
12079 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12080
12081 // All files should be read-write when read_only_files is empty
12082 let main_buffer = project
12083 .update(cx, |project, cx| {
12084 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12085 })
12086 .await
12087 .unwrap();
12088
12089 main_buffer.read_with(cx, |buffer, _| {
12090 assert!(
12091 !buffer.read_only(),
12092 "Files should not be read-only when read_only_files is empty"
12093 );
12094 });
12095
12096 let generated_buffer = project
12097 .update(cx, |project, cx| {
12098 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12099 })
12100 .await
12101 .unwrap();
12102
12103 generated_buffer.read_with(cx, |buffer, _| {
12104 assert!(
12105 !buffer.read_only(),
12106 "Generated files should not be read-only when read_only_files is empty"
12107 );
12108 });
12109}
12110
12111#[gpui::test]
12112async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12113 init_test(cx);
12114
12115 // Configure to make lock files read-only
12116 cx.update(|cx| {
12117 cx.update_global::<SettingsStore, _>(|store, cx| {
12118 store.update_user_settings(cx, |settings| {
12119 settings.project.worktree.read_only_files = Some(vec![
12120 "**/*.lock".to_string(),
12121 "**/package-lock.json".to_string(),
12122 ]);
12123 });
12124 });
12125 });
12126
12127 let fs = FakeFs::new(cx.background_executor.clone());
12128 fs.insert_tree(
12129 path!("/root"),
12130 json!({
12131 "Cargo.lock": "# Lock file",
12132 "Cargo.toml": "[package]",
12133 "package-lock.json": "{}",
12134 "package.json": "{}",
12135 }),
12136 )
12137 .await;
12138
12139 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12140
12141 // Cargo.lock should be read-only
12142 let cargo_lock = project
12143 .update(cx, |project, cx| {
12144 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12145 })
12146 .await
12147 .unwrap();
12148
12149 cargo_lock.read_with(cx, |buffer, _| {
12150 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12151 });
12152
12153 // Cargo.toml should be read-write
12154 let cargo_toml = project
12155 .update(cx, |project, cx| {
12156 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12157 })
12158 .await
12159 .unwrap();
12160
12161 cargo_toml.read_with(cx, |buffer, _| {
12162 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12163 });
12164
12165 // package-lock.json should be read-only
12166 let package_lock = project
12167 .update(cx, |project, cx| {
12168 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12169 })
12170 .await
12171 .unwrap();
12172
12173 package_lock.read_with(cx, |buffer, _| {
12174 assert!(buffer.read_only(), "package-lock.json should be read-only");
12175 });
12176
12177 // package.json should be read-write
12178 let package_json = project
12179 .update(cx, |project, cx| {
12180 project.open_local_buffer(path!("/root/package.json"), cx)
12181 })
12182 .await
12183 .unwrap();
12184
12185 package_json.read_with(cx, |buffer, _| {
12186 assert!(!buffer.read_only(), "package.json should not be read-only");
12187 });
12188}
12189
/// Tests for the `disable_ai` setting's "saturating" precedence: once any
/// settings layer (global, user, or project-local) sets `disable_ai: true`,
/// no lower-precedence layer can turn it back off.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Global true + user false: the user layer must not re-enable AI.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Global false + user true: any layer saying true disables AI.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Build a synthetic worktree location for project-local settings.
        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}