1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129// NOTE:
130// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
131// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
180#[gpui::test]
181async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
182 init_test(cx);
183
184 let dir = TempTree::new(json!({
185 ".editorconfig": r#"
186 root = true
187 [*.rs]
188 indent_style = tab
189 indent_size = 3
190 end_of_line = lf
191 insert_final_newline = true
192 trim_trailing_whitespace = true
193 max_line_length = 120
194 [*.js]
195 tab_width = 10
196 max_line_length = off
197 "#,
198 ".zed": {
199 "settings.json": r#"{
200 "tab_size": 8,
201 "hard_tabs": false,
202 "ensure_final_newline_on_save": false,
203 "remove_trailing_whitespace_on_save": false,
204 "preferred_line_length": 64,
205 "soft_wrap": "editor_width",
206 }"#,
207 },
208 "a.rs": "fn a() {\n A\n}",
209 "b": {
210 ".editorconfig": r#"
211 [*.rs]
212 indent_size = 2
213 max_line_length = off,
214 "#,
215 "b.rs": "fn b() {\n B\n}",
216 },
217 "c.js": "def c\n C\nend",
218 "d": {
219 ".editorconfig": r#"
220 [*.rs]
221 indent_size = 1
222 "#,
223 "d.rs": "fn d() {\n D\n}",
224 },
225 "README.json": "tabs are better\n",
226 }));
227
228 let path = dir.path();
229 let fs = FakeFs::new(cx.executor());
230 fs.insert_tree_from_real_fs(path, path).await;
231 let project = Project::test(fs, [path], cx).await;
232
233 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
234 language_registry.add(js_lang());
235 language_registry.add(json_lang());
236 language_registry.add(rust_lang());
237
238 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
239
240 cx.executor().run_until_parked();
241
242 cx.update(|cx| {
243 let tree = worktree.read(cx);
244 let settings_for = |path: &str| {
245 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
246 let file = File::for_entry(file_entry, worktree.clone());
247 let file_language = project
248 .read(cx)
249 .languages()
250 .load_language_for_file_path(file.path.as_std_path());
251 let file_language = cx
252 .foreground_executor()
253 .block_on(file_language)
254 .expect("Failed to get file language");
255 let file = file as _;
256 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
257 };
258
259 let settings_a = settings_for("a.rs");
260 let settings_b = settings_for("b/b.rs");
261 let settings_c = settings_for("c.js");
262 let settings_d = settings_for("d/d.rs");
263 let settings_readme = settings_for("README.json");
264
265 // .editorconfig overrides .zed/settings
266 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
267 assert_eq!(settings_a.hard_tabs, true);
268 assert_eq!(settings_a.ensure_final_newline_on_save, true);
269 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
270 assert_eq!(settings_a.preferred_line_length, 120);
271
272 // .editorconfig in subdirectory overrides .editorconfig in root
273 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
274 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
275
276 // "indent_size" is not set, so "tab_width" is used
277 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
278
279 // When max_line_length is "off", default to .zed/settings.json
280 assert_eq!(settings_b.preferred_line_length, 64);
281 assert_eq!(settings_c.preferred_line_length, 64);
282
283 // README.md should not be affected by .editorconfig's globe "*.rs"
284 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
285 });
286}
287
// Verifies that `.editorconfig` files in ancestor directories *outside* the
// worktree root are discovered, and that each file resolves settings from the
// nearest config whose glob section matches it.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three config layers: grandparent ([*]), parent ([*.rs]), and the
    // worktree's own ([*.md]). Only `worktree` is opened as a project root.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the initial scan and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a path in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
400#[gpui::test]
401async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
402 init_test(cx);
403
404 let fs = FakeFs::new(cx.executor());
405 fs.insert_tree(
406 path!("/parent"),
407 json!({
408 ".editorconfig": "[*]\nindent_size = 99\n",
409 "worktree": {
410 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
411 "file.rs": "fn main() {}",
412 }
413 }),
414 )
415 .await;
416
417 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
418
419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
420 language_registry.add(rust_lang());
421
422 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
423
424 cx.executor().run_until_parked();
425
426 cx.update(|cx| {
427 let tree = worktree.read(cx);
428 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
429 let file = File::for_entry(file_entry, worktree.clone());
430 let file_language = project
431 .read(cx)
432 .languages()
433 .load_language_for_file_path(file.path.as_std_path());
434 let file_language = cx
435 .foreground_executor()
436 .block_on(file_language)
437 .expect("Failed to get file language");
438 let file = file as _;
439 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
440
441 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
442 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
443 });
444}
445
446#[gpui::test]
447async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
448 init_test(cx);
449
450 let fs = FakeFs::new(cx.executor());
451 fs.insert_tree(
452 path!("/grandparent"),
453 json!({
454 ".editorconfig": "[*]\nindent_size = 99\n",
455 "parent": {
456 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
457 "worktree": {
458 "file.rs": "fn main() {}",
459 }
460 }
461 }),
462 )
463 .await;
464
465 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
466
467 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
468 language_registry.add(rust_lang());
469
470 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
471
472 cx.executor().run_until_parked();
473
474 cx.update(|cx| {
475 let tree = worktree.read(cx);
476 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
477 let file = File::for_entry(file_entry, worktree.clone());
478 let file_language = project
479 .read(cx)
480 .languages()
481 .load_language_for_file_path(file.path.as_std_path());
482 let file_language = cx
483 .foreground_executor()
484 .block_on(file_language)
485 .expect("Failed to get file language");
486 let file = file as _;
487 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
488
489 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
490 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
491 });
492}
493
// Verifies that a single external `.editorconfig` in a shared parent directory
// is applied to files in *both* worktrees opened beneath it.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Shared parent config sets indent_size = 5; each worktree has its own
    // internal config (touching only insert_final_newline), which makes the
    // external parent config eligible for loading.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
555
556#[gpui::test]
557async fn test_external_editorconfig_not_loaded_without_internal_config(
558 cx: &mut gpui::TestAppContext,
559) {
560 init_test(cx);
561
562 let fs = FakeFs::new(cx.executor());
563 fs.insert_tree(
564 path!("/parent"),
565 json!({
566 ".editorconfig": "[*]\nindent_size = 99\n",
567 "worktree": {
568 "file.rs": "fn main() {}",
569 }
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
575
576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
577 language_registry.add(rust_lang());
578
579 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
580
581 cx.executor().run_until_parked();
582
583 cx.update(|cx| {
584 let tree = worktree.read(cx);
585 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
586 let file = File::for_entry(file_entry, worktree.clone());
587 let file_language = project
588 .read(cx)
589 .languages()
590 .load_language_for_file_path(file.path.as_std_path());
591 let file_language = cx
592 .foreground_executor()
593 .block_on(file_language)
594 .expect("Failed to get file language");
595 let file = file as _;
596 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
597
598 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
599 // because without an internal .editorconfig, external configs are not loaded
600 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
601 });
602}
603
// Verifies that rewriting an *external* (outside-the-worktree) `.editorconfig`
// is observed and causes the resolved settings to refresh.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree carries an (empty-section) internal config, which makes
    // the external parent config eligible for loading.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    // Keep a clone of `fs` so we can mutate the external config later.
    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
// Verifies that a worktree added to an existing project also discovers the
// external `.editorconfig` shared with the already-open worktree.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Shared parent config sets indent_size = 7; both child directories have
    // an (empty-section) internal config so the external one is loaded.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only `existing_worktree` open.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the second directory as a new worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
// Verifies that removing a worktree clears its entry, its external
// editorconfig contents, and the associated file watchers from the
// editorconfig store.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (tracked worktree ids, external config paths,
        // watched paths) for inspection.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
810
// Verifies refcount-like cleanup: an external `.editorconfig` shared by two
// worktrees must survive removal of one worktree and keep serving the other.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // Capture both worktree ids, plus a handle to worktree_b for later checks.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
// Verifies that `git_hosting_providers` declared in a project-local
// `.zed/settings.json` is registered in the global GitHostingProviderRegistry,
// and unregistered again when the setting is removed.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // "foo" should now be listed among the registered hosting providers.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings file on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // "foo" must have been removed from the registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
#[gpui::test]
// Verifies that a `.zed/tasks.json` template containing an unknown task
// variable produces an `Event::Toast` whose link points at the tasks docs.
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Listen for the toast emitted while reloading the invalid tasks file.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
#[gpui::test]
// A task that references `$ZED_WORKTREE_ROOT` must not resolve when no
// worktree context supplies that variable, and must resolve once the single
// worktree's context provides it.
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                "label": "test worktree root",
                "command": "echo $ZED_WORKTREE_ROOT"
            }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Active item knows its worktree id but carries no worktree context, so
    // `$ZED_WORKTREE_ROOT` has no value to substitute.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but now the worktree context defines WorktreeRoot, so the
    // command's variable expands and the task resolves.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1324
#[gpui::test]
// One language-server adapter, one worktree, several subprojects: both Python
// subprojects are initially served by the same server instance (id 0); after a
// different toolchain is activated for one subproject, a second server
// instance (id 1) is started for it.
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a subproject root is the closest ancestor
    // directory containing a `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up through at most `depth` ancestors, returning the first
            // directory that contains the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // A buffer in project-b initially reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet for project-b.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b's root.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
#[gpui::test]
// End-to-end exercise of language-server lifecycle management: server startup
// on first matching buffer, capability-based buffer configuration, routing of
// change/save/open/close notifications to the right servers, language
// reassignment on file rename (including diagnostics reset and document
// version reset), and full server restart.
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so we can tell which
    // server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can observe it being cleared by the upcoming
    // extension-changing rename.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
#[gpui::test]
// Checks resolution of configured language-server binary paths: a relative
// path rooted in the worktree resolves against the worktree root, while a
// bare name falls back to lookup via the PATH environment variable.
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    // NOTE(review): the tree provides "my_fake_lsp.exe" while the
                    // settings above reference "my_fake_lsp_binary.exe" — confirm
                    // whether the relative-path check inspects this exact file or
                    // only the directory, and align the names if it does.
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // Relative path is resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // Bare binary name is left as-is for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(
2206 lsp::RegistrationParams {
2207 registrations: vec![lsp::Registration {
2208 id: Default::default(),
2209 method: "workspace/didChangeWatchedFiles".to_string(),
2210 register_options: serde_json::to_value(
2211 lsp::DidChangeWatchedFilesRegistrationOptions {
2212 watchers: vec![
2213 lsp::FileSystemWatcher {
2214 glob_pattern: lsp::GlobPattern::String(
2215 path!("/the-root/Cargo.toml").to_string(),
2216 ),
2217 kind: None,
2218 },
2219 lsp::FileSystemWatcher {
2220 glob_pattern: lsp::GlobPattern::String(
2221 path!("/the-root/src/*.{rs,c}").to_string(),
2222 ),
2223 kind: None,
2224 },
2225 lsp::FileSystemWatcher {
2226 glob_pattern: lsp::GlobPattern::String(
2227 path!("/the-root/target/y/**/*.rs").to_string(),
2228 ),
2229 kind: None,
2230 },
2231 lsp::FileSystemWatcher {
2232 glob_pattern: lsp::GlobPattern::String(
2233 path!("/the/stdlib/src/**/*.rs").to_string(),
2234 ),
2235 kind: None,
2236 },
2237 lsp::FileSystemWatcher {
2238 glob_pattern: lsp::GlobPattern::String(
2239 path!("**/Cargo.lock").to_string(),
2240 ),
2241 kind: None,
2242 },
2243 ],
2244 },
2245 )
2246 .ok(),
2247 }],
2248 },
2249 DEFAULT_LSP_REQUEST_TIMEOUT,
2250 )
2251 .await
2252 .into_response()
2253 .unwrap();
2254 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2255 let file_changes = file_changes.clone();
2256 move |params, _| {
2257 let mut file_changes = file_changes.lock();
2258 file_changes.extend(params.changes);
2259 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2260 }
2261 });
2262
2263 cx.executor().run_until_parked();
2264 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2265 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2266
2267 let mut new_watched_paths = fs.watched_paths();
2268 new_watched_paths.retain(|path| {
2269 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2270 });
2271 assert_eq!(
2272 &new_watched_paths,
2273 &[
2274 Path::new(path!("/the-root")),
2275 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2276 Path::new(path!("/the/stdlib/src"))
2277 ]
2278 );
2279
2280 // Now the language server has asked us to watch an ignored directory path,
2281 // so we recursively load it.
2282 project.update(cx, |project, cx| {
2283 let worktree = project.visible_worktrees(cx).next().unwrap();
2284 assert_eq!(
2285 worktree
2286 .read(cx)
2287 .snapshot()
2288 .entries(true, 0)
2289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2290 .collect::<Vec<_>>(),
2291 &[
2292 ("", false),
2293 (".gitignore", false),
2294 ("Cargo.lock", false),
2295 ("src", false),
2296 ("src/a.rs", false),
2297 ("src/b.rs", false),
2298 ("target", true),
2299 ("target/x", true),
2300 ("target/y", true),
2301 ("target/y/out", true),
2302 ("target/y/out/y.rs", true),
2303 ("target/z", true),
2304 ]
2305 );
2306 });
2307
2308 // Perform some file system mutations, two of which match the watched patterns,
2309 // and one of which does not.
2310 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2317 .await
2318 .unwrap();
2319 fs.create_file(
2320 path!("/the-root/target/x/out/x2.rs").as_ref(),
2321 Default::default(),
2322 )
2323 .await
2324 .unwrap();
2325 fs.create_file(
2326 path!("/the-root/target/y/out/y2.rs").as_ref(),
2327 Default::default(),
2328 )
2329 .await
2330 .unwrap();
2331 fs.save(
2332 path!("/the-root/Cargo.lock").as_ref(),
2333 &"".into(),
2334 Default::default(),
2335 )
2336 .await
2337 .unwrap();
2338 fs.save(
2339 path!("/the-stdlib/LICENSE").as_ref(),
2340 &"".into(),
2341 Default::default(),
2342 )
2343 .await
2344 .unwrap();
2345 fs.save(
2346 path!("/the/stdlib/src/string.rs").as_ref(),
2347 &"".into(),
2348 Default::default(),
2349 )
2350 .await
2351 .unwrap();
2352
2353 // The language server receives events for the FS mutations that match its watch patterns.
2354 cx.executor().run_until_parked();
2355 assert_eq!(
2356 &*file_changes.lock(),
2357 &[
2358 lsp::FileEvent {
2359 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2360 typ: lsp::FileChangeType::CHANGED,
2361 },
2362 lsp::FileEvent {
2363 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2364 typ: lsp::FileChangeType::DELETED,
2365 },
2366 lsp::FileEvent {
2367 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2368 typ: lsp::FileChangeType::CREATED,
2369 },
2370 lsp::FileEvent {
2371 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2372 typ: lsp::FileChangeType::CREATED,
2373 },
2374 lsp::FileEvent {
2375 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2376 typ: lsp::FileChangeType::CHANGED,
2377 },
2378 ]
2379 );
2380}
2381
// Verifies that multiple dynamic `workspace/didChangeWatchedFiles`
// registrations are tracked independently, and that unregistering one
// registration leaves the other's watchers active.
#[gpui::test]
async fn test_multiple_did_change_watched_files_registrations(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "docs": {
                "readme.md": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake language server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Accumulates the watched-file events the server receives; kept sorted by
    // URI so the assertions below are independent of notification order.
    let file_changes = Arc::new(Mutex::new(Vec::new()));

    // Register two separate watched file registrations.
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: "reg-1".to_string(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/root/src/*.rs").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();

    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: "reg-2".to_string(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/root/docs/*.md").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();

    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();

    // Both registrations should match their respective patterns.
    fs.create_file(path!("/root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/root/docs/guide.md").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/root/docs/guide.md")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
    file_changes.lock().clear();

    // Unregister the first registration.
    fake_server
        .request::<lsp::request::UnregisterCapability>(
            lsp::UnregistrationParams {
                unregisterations: vec![lsp::Unregistration {
                    id: "reg-1".to_string(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    cx.executor().run_until_parked();

    // Only the second registration should still match.
    fs.create_file(path!("/root/src/d.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/root/docs/notes.md").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/root/docs/notes.md")).unwrap(),
            typ: lsp::FileChangeType::CREATED,
        }]
    );
}
2545
2546#[gpui::test]
2547async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2548 init_test(cx);
2549
2550 let fs = FakeFs::new(cx.executor());
2551 fs.insert_tree(
2552 path!("/dir"),
2553 json!({
2554 "a.rs": "let a = 1;",
2555 "b.rs": "let b = 2;"
2556 }),
2557 )
2558 .await;
2559
2560 let project = Project::test(
2561 fs,
2562 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2563 cx,
2564 )
2565 .await;
2566 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2567
2568 let buffer_a = project
2569 .update(cx, |project, cx| {
2570 project.open_local_buffer(path!("/dir/a.rs"), cx)
2571 })
2572 .await
2573 .unwrap();
2574 let buffer_b = project
2575 .update(cx, |project, cx| {
2576 project.open_local_buffer(path!("/dir/b.rs"), cx)
2577 })
2578 .await
2579 .unwrap();
2580
2581 lsp_store.update(cx, |lsp_store, cx| {
2582 lsp_store
2583 .update_diagnostics(
2584 LanguageServerId(0),
2585 lsp::PublishDiagnosticsParams {
2586 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2587 version: None,
2588 diagnostics: vec![lsp::Diagnostic {
2589 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2590 severity: Some(lsp::DiagnosticSeverity::ERROR),
2591 message: "error 1".to_string(),
2592 ..Default::default()
2593 }],
2594 },
2595 None,
2596 DiagnosticSourceKind::Pushed,
2597 &[],
2598 cx,
2599 )
2600 .unwrap();
2601 lsp_store
2602 .update_diagnostics(
2603 LanguageServerId(0),
2604 lsp::PublishDiagnosticsParams {
2605 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2606 version: None,
2607 diagnostics: vec![lsp::Diagnostic {
2608 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2609 severity: Some(DiagnosticSeverity::WARNING),
2610 message: "error 2".to_string(),
2611 ..Default::default()
2612 }],
2613 },
2614 None,
2615 DiagnosticSourceKind::Pushed,
2616 &[],
2617 cx,
2618 )
2619 .unwrap();
2620 });
2621
2622 buffer_a.update(cx, |buffer, _| {
2623 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2624 assert_eq!(
2625 chunks
2626 .iter()
2627 .map(|(s, d)| (s.as_str(), *d))
2628 .collect::<Vec<_>>(),
2629 &[
2630 ("let ", None),
2631 ("a", Some(DiagnosticSeverity::ERROR)),
2632 (" = 1;", None),
2633 ]
2634 );
2635 });
2636 buffer_b.update(cx, |buffer, _| {
2637 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2638 assert_eq!(
2639 chunks
2640 .iter()
2641 .map(|(s, d)| (s.as_str(), *d))
2642 .collect::<Vec<_>>(),
2643 &[
2644 ("let ", None),
2645 ("b", Some(DiagnosticSeverity::WARNING)),
2646 (" = 2;", None),
2647 ]
2648 );
2649 });
2650}
2651
2652#[gpui::test]
2653async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2654 init_test(cx);
2655
2656 let fs = FakeFs::new(cx.executor());
2657 fs.insert_tree(
2658 path!("/root"),
2659 json!({
2660 "dir": {
2661 ".git": {
2662 "HEAD": "ref: refs/heads/main",
2663 },
2664 ".gitignore": "b.rs",
2665 "a.rs": "let a = 1;",
2666 "b.rs": "let b = 2;",
2667 },
2668 "other.rs": "let b = c;"
2669 }),
2670 )
2671 .await;
2672
2673 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2674 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2675 let (worktree, _) = project
2676 .update(cx, |project, cx| {
2677 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2678 })
2679 .await
2680 .unwrap();
2681 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2682
2683 let (worktree, _) = project
2684 .update(cx, |project, cx| {
2685 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2686 })
2687 .await
2688 .unwrap();
2689 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2690
2691 let server_id = LanguageServerId(0);
2692 lsp_store.update(cx, |lsp_store, cx| {
2693 lsp_store
2694 .update_diagnostics(
2695 server_id,
2696 lsp::PublishDiagnosticsParams {
2697 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2698 version: None,
2699 diagnostics: vec![lsp::Diagnostic {
2700 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2701 severity: Some(lsp::DiagnosticSeverity::ERROR),
2702 message: "unused variable 'b'".to_string(),
2703 ..Default::default()
2704 }],
2705 },
2706 None,
2707 DiagnosticSourceKind::Pushed,
2708 &[],
2709 cx,
2710 )
2711 .unwrap();
2712 lsp_store
2713 .update_diagnostics(
2714 server_id,
2715 lsp::PublishDiagnosticsParams {
2716 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2717 version: None,
2718 diagnostics: vec![lsp::Diagnostic {
2719 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2720 severity: Some(lsp::DiagnosticSeverity::ERROR),
2721 message: "unknown variable 'c'".to_string(),
2722 ..Default::default()
2723 }],
2724 },
2725 None,
2726 DiagnosticSourceKind::Pushed,
2727 &[],
2728 cx,
2729 )
2730 .unwrap();
2731 });
2732
2733 let main_ignored_buffer = project
2734 .update(cx, |project, cx| {
2735 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2736 })
2737 .await
2738 .unwrap();
2739 main_ignored_buffer.update(cx, |buffer, _| {
2740 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2741 assert_eq!(
2742 chunks
2743 .iter()
2744 .map(|(s, d)| (s.as_str(), *d))
2745 .collect::<Vec<_>>(),
2746 &[
2747 ("let ", None),
2748 ("b", Some(DiagnosticSeverity::ERROR)),
2749 (" = 2;", None),
2750 ],
2751 "Gigitnored buffers should still get in-buffer diagnostics",
2752 );
2753 });
2754 let other_buffer = project
2755 .update(cx, |project, cx| {
2756 project.open_buffer((other_worktree_id, rel_path("")), cx)
2757 })
2758 .await
2759 .unwrap();
2760 other_buffer.update(cx, |buffer, _| {
2761 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2762 assert_eq!(
2763 chunks
2764 .iter()
2765 .map(|(s, d)| (s.as_str(), *d))
2766 .collect::<Vec<_>>(),
2767 &[
2768 ("let b = ", None),
2769 ("c", Some(DiagnosticSeverity::ERROR)),
2770 (";", None),
2771 ],
2772 "Buffers from hidden projects should still get in-buffer diagnostics"
2773 );
2774 });
2775
2776 project.update(cx, |project, cx| {
2777 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2778 assert_eq!(
2779 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2780 vec![(
2781 ProjectPath {
2782 worktree_id: main_worktree_id,
2783 path: rel_path("b.rs").into(),
2784 },
2785 server_id,
2786 DiagnosticSummary {
2787 error_count: 1,
2788 warning_count: 0,
2789 }
2790 )]
2791 );
2792 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2793 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2794 });
2795}
2796
// Verifies the event lifecycle for disk-based diagnostics: the progress token
// drives Started/Finished events, publishes produce DiagnosticsUpdated, and a
// redundant empty publish emits no extra event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Capture project events from this point on.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting work under the disk-based progress token emits a Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for `a.rs` emits a DiagnosticsUpdated event for
    // that path, even though the buffer is not open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress token emits a Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The previously published diagnostic is attached once the buffer opens.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish produces no new event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2932
// Verifies that restarting a language server while its disk-based diagnostics
// are in flight replaces the server and does not leave the project stuck in a
// "diagnostics running" state for the old server.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The still-open buffer is registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
3034
3035#[gpui::test]
3036async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
3037 init_test(cx);
3038
3039 let fs = FakeFs::new(cx.executor());
3040 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
3041
3042 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3043
3044 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3045 language_registry.add(rust_lang());
3046 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3047
3048 let (buffer, _) = project
3049 .update(cx, |project, cx| {
3050 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3051 })
3052 .await
3053 .unwrap();
3054
3055 // Publish diagnostics
3056 let fake_server = fake_servers.next().await.unwrap();
3057 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3058 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3059 version: None,
3060 diagnostics: vec![lsp::Diagnostic {
3061 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3062 severity: Some(lsp::DiagnosticSeverity::ERROR),
3063 message: "the message".to_string(),
3064 ..Default::default()
3065 }],
3066 });
3067
3068 cx.executor().run_until_parked();
3069 buffer.update(cx, |buffer, _| {
3070 assert_eq!(
3071 buffer
3072 .snapshot()
3073 .diagnostics_in_range::<_, usize>(0..1, false)
3074 .map(|entry| entry.diagnostic.message.clone())
3075 .collect::<Vec<_>>(),
3076 ["the message".to_string()]
3077 );
3078 });
3079 project.update(cx, |project, cx| {
3080 assert_eq!(
3081 project.diagnostic_summary(false, cx),
3082 DiagnosticSummary {
3083 error_count: 1,
3084 warning_count: 0,
3085 }
3086 );
3087 });
3088
3089 project.update(cx, |project, cx| {
3090 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3091 });
3092
3093 // The diagnostics are cleared.
3094 cx.executor().run_until_parked();
3095 buffer.update(cx, |buffer, _| {
3096 assert_eq!(
3097 buffer
3098 .snapshot()
3099 .diagnostics_in_range::<_, usize>(0..1, false)
3100 .map(|entry| entry.diagnostic.message.clone())
3101 .collect::<Vec<_>>(),
3102 Vec::<String>::new(),
3103 );
3104 });
3105 project.update(cx, |project, cx| {
3106 assert_eq!(
3107 project.diagnostic_summary(false, cx),
3108 DiagnosticSummary {
3109 error_count: 0,
3110 warning_count: 0,
3111 }
3112 );
3113 });
3114}
3115
3116#[gpui::test]
3117async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3118 init_test(cx);
3119
3120 let fs = FakeFs::new(cx.executor());
3121 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3122
3123 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3124 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3125
3126 language_registry.add(rust_lang());
3127 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3128
3129 let (buffer, _handle) = project
3130 .update(cx, |project, cx| {
3131 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3132 })
3133 .await
3134 .unwrap();
3135
3136 // Before restarting the server, report diagnostics with an unknown buffer version.
3137 let fake_server = fake_servers.next().await.unwrap();
3138 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3139 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3140 version: Some(10000),
3141 diagnostics: Vec::new(),
3142 });
3143 cx.executor().run_until_parked();
3144 project.update(cx, |project, cx| {
3145 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3146 });
3147
3148 let mut fake_server = fake_servers.next().await.unwrap();
3149 let notification = fake_server
3150 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3151 .await
3152 .text_document;
3153 assert_eq!(notification.version, 0);
3154}
3155
3156#[gpui::test]
3157async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
3158 init_test(cx);
3159
3160 let progress_token = "the-progress-token";
3161
3162 let fs = FakeFs::new(cx.executor());
3163 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3164
3165 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3166
3167 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3168 language_registry.add(rust_lang());
3169 let mut fake_servers = language_registry.register_fake_lsp(
3170 "Rust",
3171 FakeLspAdapter {
3172 name: "the-language-server",
3173 disk_based_diagnostics_sources: vec!["disk".into()],
3174 disk_based_diagnostics_progress_token: Some(progress_token.into()),
3175 ..Default::default()
3176 },
3177 );
3178
3179 let (buffer, _handle) = project
3180 .update(cx, |project, cx| {
3181 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3182 })
3183 .await
3184 .unwrap();
3185
3186 // Simulate diagnostics starting to update.
3187 let mut fake_server = fake_servers.next().await.unwrap();
3188 fake_server
3189 .start_progress_with(
3190 "another-token",
3191 lsp::WorkDoneProgressBegin {
3192 cancellable: Some(false),
3193 ..Default::default()
3194 },
3195 DEFAULT_LSP_REQUEST_TIMEOUT,
3196 )
3197 .await;
3198 // Ensure progress notification is fully processed before starting the next one
3199 cx.executor().run_until_parked();
3200
3201 fake_server
3202 .start_progress_with(
3203 progress_token,
3204 lsp::WorkDoneProgressBegin {
3205 cancellable: Some(true),
3206 ..Default::default()
3207 },
3208 DEFAULT_LSP_REQUEST_TIMEOUT,
3209 )
3210 .await;
3211 // Ensure progress notification is fully processed before cancelling
3212 cx.executor().run_until_parked();
3213
3214 project.update(cx, |project, cx| {
3215 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
3216 });
3217 cx.executor().run_until_parked();
3218
3219 let cancel_notification = fake_server
3220 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
3221 .await;
3222 assert_eq!(
3223 cancel_notification.token,
3224 NumberOrString::String(progress_token.into())
3225 );
3226}
3227
// Verifies that toggling `enable_language_server` in the per-language
// settings stops and restarts only the affected language's server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3345
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an older buffer version are
    // translated ("transformed") through subsequent edits: positions shift
    // with inserted text, overlapping diagnostics render correctly, and
    // disk-based diagnostics track unsaved edits.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is declared as a disk-based diagnostics source so entries from it
    // get `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer.
    // Note the version: these positions refer to the pre-edit text, so the
    // client must translate them forward through the "\n\n" insertion.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shift by 2 (the inserted "\n\n"): original row 1 -> 3, 2 -> 4.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // The chunk iterator should annotate exactly the diagnosed spans.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // Requesting a sub-range clips diagnostics at the range boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // This WARNING fully contains the ERROR range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The containing (warning) entry sorts before the contained (error)
        // entry within the same start position.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins the
        // chunk annotation; the warning covers the rest of its span.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            // Sent with the 'BB' entry first, but entries are stored sorted by
            // position, as the assertions below verify.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3637
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered: they must be
    // widened to cover at least one character so they remain visible.
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store: one in
    // the middle of line 0 and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3715
3716#[gpui::test]
3717async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3718 init_test(cx);
3719
3720 let fs = FakeFs::new(cx.executor());
3721 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3722 .await;
3723
3724 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3725 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3726
3727 lsp_store.update(cx, |lsp_store, cx| {
3728 lsp_store
3729 .update_diagnostic_entries(
3730 LanguageServerId(0),
3731 Path::new(path!("/dir/a.rs")).to_owned(),
3732 None,
3733 None,
3734 vec![DiagnosticEntry {
3735 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3736 diagnostic: Diagnostic {
3737 severity: DiagnosticSeverity::ERROR,
3738 is_primary: true,
3739 message: "syntax error a1".to_string(),
3740 source_kind: DiagnosticSourceKind::Pushed,
3741 ..Diagnostic::default()
3742 },
3743 }],
3744 cx,
3745 )
3746 .unwrap();
3747 lsp_store
3748 .update_diagnostic_entries(
3749 LanguageServerId(1),
3750 Path::new(path!("/dir/a.rs")).to_owned(),
3751 None,
3752 None,
3753 vec![DiagnosticEntry {
3754 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3755 diagnostic: Diagnostic {
3756 severity: DiagnosticSeverity::ERROR,
3757 is_primary: true,
3758 message: "syntax error b1".to_string(),
3759 source_kind: DiagnosticSourceKind::Pushed,
3760 ..Diagnostic::default()
3761 },
3762 }],
3763 cx,
3764 )
3765 .unwrap();
3766
3767 assert_eq!(
3768 lsp_store.diagnostic_summary(false, cx),
3769 DiagnosticSummary {
3770 error_count: 2,
3771 warning_count: 0,
3772 }
3773 );
3774 });
3775}
3776
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP text edits computed against an OLD buffer version are
    // translated through edits the user made since, so they apply cleanly to
    // the current buffer content.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open; the LSP edits
    // below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate the stale-version edits; positions below refer to the
    // ORIGINAL text (before the comment insertions above).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // comments while still landing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3931
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a "large diff" style set of LSP edits (replace + reinsert
    // most of the file) is minimized into the small net change it represents.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits collapse into just two minimal edits:
        // rewriting the import path and deleting the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4042
4043#[gpui::test]
4044async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4045 cx: &mut gpui::TestAppContext,
4046) {
4047 init_test(cx);
4048
4049 let text = "Path()";
4050
4051 let fs = FakeFs::new(cx.executor());
4052 fs.insert_tree(
4053 path!("/dir"),
4054 json!({
4055 "a.rs": text
4056 }),
4057 )
4058 .await;
4059
4060 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4061 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4062 let buffer = project
4063 .update(cx, |project, cx| {
4064 project.open_local_buffer(path!("/dir/a.rs"), cx)
4065 })
4066 .await
4067 .unwrap();
4068
4069 // Simulate the language server sending us a pair of edits at the same location,
4070 // with an insertion following a replacement (which violates the LSP spec).
4071 let edits = lsp_store
4072 .update(cx, |lsp_store, cx| {
4073 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4074 &buffer,
4075 [
4076 lsp::TextEdit {
4077 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4078 new_text: "Path".into(),
4079 },
4080 lsp::TextEdit {
4081 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4082 new_text: "from path import Path\n\n\n".into(),
4083 },
4084 ],
4085 LanguageServerId(0),
4086 None,
4087 cx,
4088 )
4089 })
4090 .await
4091 .unwrap();
4092
4093 buffer.update(cx, |buffer, cx| {
4094 buffer.edit(edits, None, cx);
4095 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4096 });
4097}
4098
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that malformed LSP edits — unordered, with inverted ranges or
    // positions past the end of the file — are normalized into valid,
    // minimal buffer edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchors to points so the expected values can be compared.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, the same two minimal edits remain as in the
        // well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4205
4206fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4207 buffer: &Buffer,
4208 range: Range<T>,
4209) -> Vec<(String, Option<DiagnosticSeverity>)> {
4210 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4211 for chunk in buffer.snapshot().chunks(range, true) {
4212 if chunks
4213 .last()
4214 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4215 {
4216 chunks.last_mut().unwrap().0.push_str(chunk.text);
4217 } else {
4218 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4219 }
4220 }
4221 chunks
4222}
4223
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target buffer in a file
    // outside the project's visible worktree is opened via an invisible
    // worktree that is dropped once the definition is released.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The target buffer resolves to a.rs, opened through a new
        // non-visible worktree alongside the visible one for b.rs.
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list each worktree's absolute path with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4324
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that a completion item's `text_edit` (range + new text) takes
    // precedence over both `insert_text` and `label` when building the final
    // completion.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer (after "fqn").
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with a single item carrying all three text sources; only the
    // text_edit ("textEditText" over the last 3 characters) should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The completion's replacement text and range come from text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4408
// Verifies that the LSP 3.17 `CompletionList.itemDefaults.editRange` is
// honored when individual completion items omit their own `textEdit`:
// the item's `textEditText` (or, failing that, its label) is applied over
// the default edit range.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    // Wait for the fake server to start and finish initialization before
    // issuing completion requests.
    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // The request is issued first; the handler installed below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `textEditText` wins over the label, and the default edit range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // insert_text must be ignored when a default edit range exists.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no textEditText, the label is used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4546
// Verifies completion fallbacks when the server supplies no edit ranges at
// all: `insertText` (or the label) is used as the replacement text, and the
// replace range is inferred from the buffer contents around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler installed below produces the reply.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // insert_text is used verbatim; the range covers the word "fqn".
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is the replacement; the inferred range covers "cmp".
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4653
// Verifies that carriage returns in a completion's replacement text are
// normalized to plain '\n' before the completion is surfaced.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed bare '\r' and '\r\n' line endings in the insert text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both '\r' and '\r\n' collapse to '\n'.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4722
// Verifies applying a code action that carries a command rather than edits:
// resolving the action yields a command, executing the command makes the
// server send a reverse `workspace/applyEdit` request, and those edits end up
// in the project transaction returned to the caller.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before they can be applied.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the editor: insert "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4866
// Verifies that renaming a worktree entry into a not-yet-existing nested
// directory creates the intermediate directories, preserves file contents,
// and that a subsequent rename into an existing directory also succeeds.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a directory hierarchy that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id: the move may have assigned a new entry.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4974
4975#[gpui::test(iterations = 10)]
4976async fn test_save_file(cx: &mut gpui::TestAppContext) {
4977 init_test(cx);
4978
4979 let fs = FakeFs::new(cx.executor());
4980 fs.insert_tree(
4981 path!("/dir"),
4982 json!({
4983 "file1": "the old contents",
4984 }),
4985 )
4986 .await;
4987
4988 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4989 let buffer = project
4990 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4991 .await
4992 .unwrap();
4993 buffer.update(cx, |buffer, cx| {
4994 assert_eq!(buffer.text(), "the old contents");
4995 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4996 });
4997
4998 project
4999 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5000 .await
5001 .unwrap();
5002
5003 let new_text = fs
5004 .load(Path::new(path!("/dir/file1")))
5005 .await
5006 .unwrap()
5007 .replace("\r\n", "\n");
5008 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5009}
5010
// Regression test: saving an untitled buffer under a name that matches a
// registered language must start that language's server and register the
// buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer: no file, so no language, so no server yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a .rs name gives the buffer the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5090
5091#[gpui::test(iterations = 30)]
5092async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5093 init_test(cx);
5094
5095 let fs = FakeFs::new(cx.executor());
5096 fs.insert_tree(
5097 path!("/dir"),
5098 json!({
5099 "file1": "the original contents",
5100 }),
5101 )
5102 .await;
5103
5104 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5105 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5106 let buffer = project
5107 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5108 .await
5109 .unwrap();
5110
5111 // Change the buffer's file on disk, and then wait for the file change
5112 // to be detected by the worktree, so that the buffer starts reloading.
5113 fs.save(
5114 path!("/dir/file1").as_ref(),
5115 &"the first contents".into(),
5116 Default::default(),
5117 )
5118 .await
5119 .unwrap();
5120 worktree.next_event(cx).await;
5121
5122 // Change the buffer's file again. Depending on the random seed, the
5123 // previous file change may still be in progress.
5124 fs.save(
5125 path!("/dir/file1").as_ref(),
5126 &"the second contents".into(),
5127 Default::default(),
5128 )
5129 .await
5130 .unwrap();
5131 worktree.next_event(cx).await;
5132
5133 cx.executor().run_until_parked();
5134 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5135 buffer.read_with(cx, |buffer, _| {
5136 assert_eq!(buffer.text(), on_disk_text);
5137 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5138 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5139 });
5140}
5141
5142#[gpui::test(iterations = 30)]
5143async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5144 init_test(cx);
5145
5146 let fs = FakeFs::new(cx.executor());
5147 fs.insert_tree(
5148 path!("/dir"),
5149 json!({
5150 "file1": "the original contents",
5151 }),
5152 )
5153 .await;
5154
5155 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5156 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5157 let buffer = project
5158 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5159 .await
5160 .unwrap();
5161
5162 // Change the buffer's file on disk, and then wait for the file change
5163 // to be detected by the worktree, so that the buffer starts reloading.
5164 fs.save(
5165 path!("/dir/file1").as_ref(),
5166 &"the first contents".into(),
5167 Default::default(),
5168 )
5169 .await
5170 .unwrap();
5171 worktree.next_event(cx).await;
5172
5173 cx.executor()
5174 .spawn(cx.executor().simulate_random_delay())
5175 .await;
5176
5177 // Perform a noop edit, causing the buffer's version to increase.
5178 buffer.update(cx, |buffer, cx| {
5179 buffer.edit([(0..0, " ")], None, cx);
5180 buffer.undo(cx);
5181 });
5182
5183 cx.executor().run_until_parked();
5184 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5185 buffer.read_with(cx, |buffer, _| {
5186 let buffer_text = buffer.text();
5187 if buffer_text == on_disk_text {
5188 assert!(
5189 !buffer.is_dirty() && !buffer.has_conflict(),
5190 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5191 );
5192 }
5193 // If the file change occurred while the buffer was processing the first
5194 // change, the buffer will be in a conflicting state.
5195 else {
5196 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5197 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5198 }
5199 });
5200}
5201
5202#[gpui::test]
5203async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5204 init_test(cx);
5205
5206 let fs = FakeFs::new(cx.executor());
5207 fs.insert_tree(
5208 path!("/dir"),
5209 json!({
5210 "file1": "the old contents",
5211 }),
5212 )
5213 .await;
5214
5215 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5216 let buffer = project
5217 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5218 .await
5219 .unwrap();
5220 buffer.update(cx, |buffer, cx| {
5221 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5222 });
5223
5224 project
5225 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5226 .await
5227 .unwrap();
5228
5229 let new_text = fs
5230 .load(Path::new(path!("/dir/file1")))
5231 .await
5232 .unwrap()
5233 .replace("\r\n", "\n");
5234 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5235}
5236
// Verifies "save as" for an untitled buffer: the file is written to disk, the
// buffer becomes clean, picks up the language matching its new extension, and
// reopening the same path dedupes to the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Untitled buffers have no file, so no language detection yet.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // Let language detection and file events settle.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The .rs extension now maps the buffer to the Rust language.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path must return the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5290
// Verifies "save as" for a buffer that already has a file: the buffer is
// re-associated with the new path, while the original file on disk keeps its
// old contents and identity.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Edit "data about a" -> "data about b" before saving under a new name.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5357
// Exercises a local worktree against real filesystem renames/deletes, then
// replays the recorded update stream into a remote worktree and checks both
// converge: entry ids survive renames, open buffers track their files' new
// paths, and a deleted file's buffer reports `DiskState::Deleted`.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS + real file events require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, for later replay.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including the parent-dir rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // file5 was deleted, so its buffer keeps the old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the recorded update stream into the remote replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5525
5526#[gpui::test(iterations = 10)]
5527async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5528 init_test(cx);
5529
5530 let fs = FakeFs::new(cx.executor());
5531 fs.insert_tree(
5532 path!("/dir"),
5533 json!({
5534 "a": {
5535 "file1": "",
5536 }
5537 }),
5538 )
5539 .await;
5540
5541 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5542 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5543 let tree_id = tree.update(cx, |tree, _| tree.id());
5544
5545 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5546 project.update(cx, |project, cx| {
5547 let tree = project.worktrees(cx).next().unwrap();
5548 tree.read(cx)
5549 .entry_for_path(rel_path(path))
5550 .unwrap_or_else(|| panic!("no entry for path {}", path))
5551 .id
5552 })
5553 };
5554
5555 let dir_id = id_for_path("a", cx);
5556 let file_id = id_for_path("a/file1", cx);
5557 let buffer = project
5558 .update(cx, |p, cx| {
5559 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5560 })
5561 .await
5562 .unwrap();
5563 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5564
5565 project
5566 .update(cx, |project, cx| {
5567 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5568 })
5569 .unwrap()
5570 .await
5571 .into_included()
5572 .unwrap();
5573 cx.executor().run_until_parked();
5574
5575 assert_eq!(id_for_path("b", cx), dir_id);
5576 assert_eq!(id_for_path("b/file1", cx), file_id);
5577 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5578}
5579
5580#[gpui::test]
5581async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5582 init_test(cx);
5583
5584 let fs = FakeFs::new(cx.executor());
5585 fs.insert_tree(
5586 "/dir",
5587 json!({
5588 "a.txt": "a-contents",
5589 "b.txt": "b-contents",
5590 }),
5591 )
5592 .await;
5593
5594 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5595
5596 // Spawn multiple tasks to open paths, repeating some paths.
5597 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5598 (
5599 p.open_local_buffer("/dir/a.txt", cx),
5600 p.open_local_buffer("/dir/b.txt", cx),
5601 p.open_local_buffer("/dir/a.txt", cx),
5602 )
5603 });
5604
5605 let buffer_a_1 = buffer_a_1.await.unwrap();
5606 let buffer_a_2 = buffer_a_2.await.unwrap();
5607 let buffer_b = buffer_b.await.unwrap();
5608 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5609 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5610
5611 // There is only one buffer per path.
5612 let buffer_a_id = buffer_a_1.entity_id();
5613 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5614
5615 // Open the same path again while it is still open.
5616 drop(buffer_a_1);
5617 let buffer_a_3 = project
5618 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5619 .await
5620 .unwrap();
5621
5622 // There's still only one buffer per path.
5623 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5624}
5625
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state machine: which edits mark a buffer
    // dirty, how saving or restoring the saved text clears it, and exactly
    // which `BufferEvent`s are emitted along the way — including the cases
    // where the underlying file is deleted on disk.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-operation event emitted by the buffer under test.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events except replication `Operation`s, which fire on
        // every edit and would drown out the state-change events we assert on.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a successful save by acknowledging the current version.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only one DirtyChanged fires for the two edits — the second edit
    // doesn't change the (already dirty) state.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Deletion alone only changes the file handle; the unedited buffer stays clean.
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file from under it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5807
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to the file changing on disk:
    // a clean buffer is reloaded in place (preserving anchor positions via a
    // diff-based edit), while a dirty buffer keeps its contents and is marked
    // as conflicted instead.
    init_test(cx);

    // `ˇ` markers record offsets whose anchors we expect to survive the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should land on the marked offsets of the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5890
5891#[gpui::test]
5892async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5893 init_test(cx);
5894
5895 let fs = FakeFs::new(cx.executor());
5896 fs.insert_tree(
5897 path!("/dir"),
5898 json!({
5899 "file1": "a\nb\nc\n",
5900 "file2": "one\r\ntwo\r\nthree\r\n",
5901 }),
5902 )
5903 .await;
5904
5905 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5906 let buffer1 = project
5907 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5908 .await
5909 .unwrap();
5910 let buffer2 = project
5911 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5912 .await
5913 .unwrap();
5914
5915 buffer1.update(cx, |buffer, _| {
5916 assert_eq!(buffer.text(), "a\nb\nc\n");
5917 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5918 });
5919 buffer2.update(cx, |buffer, _| {
5920 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5921 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5922 });
5923
5924 // Change a file's line endings on disk from unix to windows. The buffer's
5925 // state updates correctly.
5926 fs.save(
5927 path!("/dir/file1").as_ref(),
5928 &"aaa\nb\nc\n".into(),
5929 LineEnding::Windows,
5930 )
5931 .await
5932 .unwrap();
5933 cx.executor().run_until_parked();
5934 buffer1.update(cx, |buffer, _| {
5935 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5936 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5937 });
5938
5939 // Save a file with windows line endings. The file is written correctly.
5940 buffer2.update(cx, |buffer, cx| {
5941 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5942 });
5943 project
5944 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5945 .await
5946 .unwrap();
5947 assert_eq!(
5948 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5949 "one\r\ntwo\r\nthree\r\nfour\r\n",
5950 );
5951}
5952
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics with `related_information` are
    // grouped: each primary diagnostic and its hint entries share a group id,
    // with exactly one `is_primary` entry per group, and that
    // `diagnostic_group` returns a group's entries in position order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // A publishDiagnostics payload containing two logical groups:
    // "error 1" (warning + one hint) and "error 2" (error + two hints).
    // The hint entries cross-reference their primaries via related_information.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries over the whole buffer, ordered by position. "error 2"'s
    // group (id 0) has its hints on line 1 and the primary on line 2.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, in position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6212
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-rename handshake: when a server registers
    // fileOperations filters for willRename/didRename, renaming a matching
    // entry sends `workspace/willRenameFiles` (and applies the returned
    // WorkspaceEdit) before the rename, then `workspace/didRenameFiles` after.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters: any "*.rs" file plus any folder, both on the "file" scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs; it must wait on willRenameFiles.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; it targets an
    // unrelated buffer to show the project applies the returned edit.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records that the server actually produced the edit during willRenameFiles.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6349
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol renaming via LSP: `prepare_rename` resolves the
    // renameable range, and `perform_rename` applies a multi-file
    // WorkspaceEdit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepareProvider so prepare_rename is used.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // prepare_rename at offset 7 (inside "ONE") should yield range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename returns a WorkspaceEdit touching both files; the edits
    // rename "ONE" in one.rs and both "one::ONE" references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its applied edits; both
    // buffers should now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6490
6491#[gpui::test]
6492async fn test_search(cx: &mut gpui::TestAppContext) {
6493 init_test(cx);
6494
6495 let fs = FakeFs::new(cx.executor());
6496 fs.insert_tree(
6497 path!("/dir"),
6498 json!({
6499 "one.rs": "const ONE: usize = 1;",
6500 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6501 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6502 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6503 }),
6504 )
6505 .await;
6506 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6507 assert_eq!(
6508 search(
6509 &project,
6510 SearchQuery::text(
6511 "TWO",
6512 false,
6513 true,
6514 false,
6515 Default::default(),
6516 Default::default(),
6517 false,
6518 None
6519 )
6520 .unwrap(),
6521 cx
6522 )
6523 .await
6524 .unwrap(),
6525 HashMap::from_iter([
6526 (path!("dir/two.rs").to_string(), vec![6..9]),
6527 (path!("dir/three.rs").to_string(), vec![37..40])
6528 ])
6529 );
6530
6531 let buffer_4 = project
6532 .update(cx, |project, cx| {
6533 project.open_local_buffer(path!("/dir/four.rs"), cx)
6534 })
6535 .await
6536 .unwrap();
6537 buffer_4.update(cx, |buffer, cx| {
6538 let text = "two::TWO";
6539 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6540 });
6541
6542 assert_eq!(
6543 search(
6544 &project,
6545 SearchQuery::text(
6546 "TWO",
6547 false,
6548 true,
6549 false,
6550 Default::default(),
6551 Default::default(),
6552 false,
6553 None,
6554 )
6555 .unwrap(),
6556 cx
6557 )
6558 .await
6559 .unwrap(),
6560 HashMap::from_iter([
6561 (path!("dir/two.rs").to_string(), vec![6..9]),
6562 (path!("dir/three.rs").to_string(), vec![37..40]),
6563 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6564 ])
6565 );
6566}
6567
6568#[gpui::test]
6569async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6570 init_test(cx);
6571
6572 let search_query = "file";
6573
6574 let fs = FakeFs::new(cx.executor());
6575 fs.insert_tree(
6576 path!("/dir"),
6577 json!({
6578 "one.rs": r#"// Rust file one"#,
6579 "one.ts": r#"// TypeScript file one"#,
6580 "two.rs": r#"// Rust file two"#,
6581 "two.ts": r#"// TypeScript file two"#,
6582 }),
6583 )
6584 .await;
6585 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6586
6587 assert!(
6588 search(
6589 &project,
6590 SearchQuery::text(
6591 search_query,
6592 false,
6593 true,
6594 false,
6595 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6596 Default::default(),
6597 false,
6598 None
6599 )
6600 .unwrap(),
6601 cx
6602 )
6603 .await
6604 .unwrap()
6605 .is_empty(),
6606 "If no inclusions match, no files should be returned"
6607 );
6608
6609 assert_eq!(
6610 search(
6611 &project,
6612 SearchQuery::text(
6613 search_query,
6614 false,
6615 true,
6616 false,
6617 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6618 Default::default(),
6619 false,
6620 None
6621 )
6622 .unwrap(),
6623 cx
6624 )
6625 .await
6626 .unwrap(),
6627 HashMap::from_iter([
6628 (path!("dir/one.rs").to_string(), vec![8..12]),
6629 (path!("dir/two.rs").to_string(), vec![8..12]),
6630 ]),
6631 "Rust only search should give only Rust files"
6632 );
6633
6634 assert_eq!(
6635 search(
6636 &project,
6637 SearchQuery::text(
6638 search_query,
6639 false,
6640 true,
6641 false,
6642 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6643 .unwrap(),
6644 Default::default(),
6645 false,
6646 None,
6647 )
6648 .unwrap(),
6649 cx
6650 )
6651 .await
6652 .unwrap(),
6653 HashMap::from_iter([
6654 (path!("dir/one.ts").to_string(), vec![14..18]),
6655 (path!("dir/two.ts").to_string(), vec![14..18]),
6656 ]),
6657 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6658 );
6659
6660 assert_eq!(
6661 search(
6662 &project,
6663 SearchQuery::text(
6664 search_query,
6665 false,
6666 true,
6667 false,
6668 PathMatcher::new(
6669 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6670 PathStyle::local()
6671 )
6672 .unwrap(),
6673 Default::default(),
6674 false,
6675 None,
6676 )
6677 .unwrap(),
6678 cx
6679 )
6680 .await
6681 .unwrap(),
6682 HashMap::from_iter([
6683 (path!("dir/two.ts").to_string(), vec![14..18]),
6684 (path!("dir/one.rs").to_string(), vec![8..12]),
6685 (path!("dir/one.ts").to_string(), vec![14..18]),
6686 (path!("dir/two.rs").to_string(), vec![8..12]),
6687 ]),
6688 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6689 );
6690}
6691
6692#[gpui::test]
6693async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6694 init_test(cx);
6695
6696 let search_query = "file";
6697
6698 let fs = FakeFs::new(cx.executor());
6699 fs.insert_tree(
6700 path!("/dir"),
6701 json!({
6702 "one.rs": r#"// Rust file one"#,
6703 "one.ts": r#"// TypeScript file one"#,
6704 "two.rs": r#"// Rust file two"#,
6705 "two.ts": r#"// TypeScript file two"#,
6706 }),
6707 )
6708 .await;
6709 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6710
6711 assert_eq!(
6712 search(
6713 &project,
6714 SearchQuery::text(
6715 search_query,
6716 false,
6717 true,
6718 false,
6719 Default::default(),
6720 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6721 false,
6722 None,
6723 )
6724 .unwrap(),
6725 cx
6726 )
6727 .await
6728 .unwrap(),
6729 HashMap::from_iter([
6730 (path!("dir/one.rs").to_string(), vec![8..12]),
6731 (path!("dir/one.ts").to_string(), vec![14..18]),
6732 (path!("dir/two.rs").to_string(), vec![8..12]),
6733 (path!("dir/two.ts").to_string(), vec![14..18]),
6734 ]),
6735 "If no exclusions match, all files should be returned"
6736 );
6737
6738 assert_eq!(
6739 search(
6740 &project,
6741 SearchQuery::text(
6742 search_query,
6743 false,
6744 true,
6745 false,
6746 Default::default(),
6747 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6748 false,
6749 None,
6750 )
6751 .unwrap(),
6752 cx
6753 )
6754 .await
6755 .unwrap(),
6756 HashMap::from_iter([
6757 (path!("dir/one.ts").to_string(), vec![14..18]),
6758 (path!("dir/two.ts").to_string(), vec![14..18]),
6759 ]),
6760 "Rust exclusion search should give only TypeScript files"
6761 );
6762
6763 assert_eq!(
6764 search(
6765 &project,
6766 SearchQuery::text(
6767 search_query,
6768 false,
6769 true,
6770 false,
6771 Default::default(),
6772 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6773 .unwrap(),
6774 false,
6775 None,
6776 )
6777 .unwrap(),
6778 cx
6779 )
6780 .await
6781 .unwrap(),
6782 HashMap::from_iter([
6783 (path!("dir/one.rs").to_string(), vec![8..12]),
6784 (path!("dir/two.rs").to_string(), vec![8..12]),
6785 ]),
6786 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6787 );
6788
6789 assert!(
6790 search(
6791 &project,
6792 SearchQuery::text(
6793 search_query,
6794 false,
6795 true,
6796 false,
6797 Default::default(),
6798 PathMatcher::new(
6799 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6800 PathStyle::local(),
6801 )
6802 .unwrap(),
6803 false,
6804 None,
6805 )
6806 .unwrap(),
6807 cx
6808 )
6809 .await
6810 .unwrap()
6811 .is_empty(),
6812 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6813 );
6814}
6815
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same scenario as `test_search_with_exclusions`, but with an extra untitled
    // buffer open in the project whose text also matches the query (presumably —
    // the first `create_local_buffer` argument looks like the buffer text; confirm
    // against its signature). The path-filtered results asserted below must be
    // unaffected by that buffer.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Kept alive for the whole test so the untitled buffer stays open.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    // Non-matching exclusion glob: every file (and no untitled-buffer entry)
    // shows up in the results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding `*.rs` leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // Excluding `*.ts` (plus a glob that matches nothing) leaves only Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every present extension returns nothing — the open untitled
    // buffer must not leak into path-filtered results either.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6943
6944#[gpui::test]
6945async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6946 init_test(cx);
6947
6948 let search_query = "file";
6949
6950 let fs = FakeFs::new(cx.executor());
6951 fs.insert_tree(
6952 path!("/dir"),
6953 json!({
6954 "one.rs": r#"// Rust file one"#,
6955 "one.ts": r#"// TypeScript file one"#,
6956 "two.rs": r#"// Rust file two"#,
6957 "two.ts": r#"// TypeScript file two"#,
6958 }),
6959 )
6960 .await;
6961 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6962 assert!(
6963 search(
6964 &project,
6965 SearchQuery::text(
6966 search_query,
6967 false,
6968 true,
6969 false,
6970 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6971 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6972 false,
6973 None,
6974 )
6975 .unwrap(),
6976 cx
6977 )
6978 .await
6979 .unwrap()
6980 .is_empty(),
6981 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6982 );
6983
6984 assert!(
6985 search(
6986 &project,
6987 SearchQuery::text(
6988 search_query,
6989 false,
6990 true,
6991 false,
6992 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6993 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6994 false,
6995 None,
6996 )
6997 .unwrap(),
6998 cx
6999 )
7000 .await
7001 .unwrap()
7002 .is_empty(),
7003 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7004 );
7005
7006 assert!(
7007 search(
7008 &project,
7009 SearchQuery::text(
7010 search_query,
7011 false,
7012 true,
7013 false,
7014 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7015 .unwrap(),
7016 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7017 .unwrap(),
7018 false,
7019 None,
7020 )
7021 .unwrap(),
7022 cx
7023 )
7024 .await
7025 .unwrap()
7026 .is_empty(),
7027 "Non-matching inclusions and exclusions should not change that."
7028 );
7029
7030 assert_eq!(
7031 search(
7032 &project,
7033 SearchQuery::text(
7034 search_query,
7035 false,
7036 true,
7037 false,
7038 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7039 .unwrap(),
7040 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7041 .unwrap(),
7042 false,
7043 None,
7044 )
7045 .unwrap(),
7046 cx
7047 )
7048 .await
7049 .unwrap(),
7050 HashMap::from_iter([
7051 (path!("dir/one.ts").to_string(), vec![14..18]),
7052 (path!("dir/two.ts").to_string(), vec![14..18]),
7053 ]),
7054 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7055 );
7056}
7057
7058#[gpui::test]
7059async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7060 init_test(cx);
7061
7062 let fs = FakeFs::new(cx.executor());
7063 fs.insert_tree(
7064 path!("/worktree-a"),
7065 json!({
7066 "haystack.rs": r#"// NEEDLE"#,
7067 "haystack.ts": r#"// NEEDLE"#,
7068 }),
7069 )
7070 .await;
7071 fs.insert_tree(
7072 path!("/worktree-b"),
7073 json!({
7074 "haystack.rs": r#"// NEEDLE"#,
7075 "haystack.ts": r#"// NEEDLE"#,
7076 }),
7077 )
7078 .await;
7079
7080 let path_style = PathStyle::local();
7081 let project = Project::test(
7082 fs.clone(),
7083 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7084 cx,
7085 )
7086 .await;
7087
7088 assert_eq!(
7089 search(
7090 &project,
7091 SearchQuery::text(
7092 "NEEDLE",
7093 false,
7094 true,
7095 false,
7096 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7097 Default::default(),
7098 true,
7099 None,
7100 )
7101 .unwrap(),
7102 cx
7103 )
7104 .await
7105 .unwrap(),
7106 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7107 "should only return results from included worktree"
7108 );
7109 assert_eq!(
7110 search(
7111 &project,
7112 SearchQuery::text(
7113 "NEEDLE",
7114 false,
7115 true,
7116 false,
7117 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7118 Default::default(),
7119 true,
7120 None,
7121 )
7122 .unwrap(),
7123 cx
7124 )
7125 .await
7126 .unwrap(),
7127 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7128 "should only return results from included worktree"
7129 );
7130
7131 assert_eq!(
7132 search(
7133 &project,
7134 SearchQuery::text(
7135 "NEEDLE",
7136 false,
7137 true,
7138 false,
7139 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7140 Default::default(),
7141 false,
7142 None,
7143 )
7144 .unwrap(),
7145 cx
7146 )
7147 .await
7148 .unwrap(),
7149 HashMap::from_iter([
7150 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7151 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7152 ]),
7153 "should return results from both worktrees"
7154 );
7155}
7156
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Searches a repo where `target/` and `node_modules/` are gitignored,
    // toggling the "include ignored" query flag (4th `SearchQuery::text`
    // argument, judging by the contrast between the queries below) and
    // combining it with inclusion/exclusion globs.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query: gitignored directories are skipped, so only the
    // top-level package.json matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created before each of the remaining searches —
    // presumably to avoid reusing worktree scan state from the previous
    // query; confirm against the worktree scanning implementation.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // With "include ignored" enabled, matches inside target/ and
    // node_modules/ are reported too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include only the (ignored) prettier directory while excluding *.ts:
    // exactly one file survives both filters.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7281
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searches for Cyrillic text. All asserted ranges are UTF-8 *byte* offsets:
    // each Cyrillic letter encodes as 2 bytes, so "привет"/"ПРИВЕТ" span 12
    // bytes, and "// " occupies bytes 0..3 of every fixture line.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive non-ASCII queries stay plain text searches
    // (see the `SearchQuery::Text` assertion below).
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // one.rs: lowercase "привет" starts after "// ПРИВЕТ? " (3 + 12 + 2 bytes).
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive matching of non-ASCII text falls back to a regex query
    // (see the `SearchQuery::Regex` assertion below).
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // Both the uppercase and lowercase occurrences match now.
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' in the query must be treated literally (not as a regex
    // wildcard) despite the regex fallback: only two.rs ends with "ПРИВЕТ.".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7364
7365#[gpui::test]
7366async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7367 init_test(cx);
7368
7369 let fs = FakeFs::new(cx.executor());
7370 fs.insert_tree(
7371 "/one/two",
7372 json!({
7373 "three": {
7374 "a.txt": "",
7375 "four": {}
7376 },
7377 "c.rs": ""
7378 }),
7379 )
7380 .await;
7381
7382 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7383 project
7384 .update(cx, |project, cx| {
7385 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7386 project.create_entry((id, rel_path("b..")), true, cx)
7387 })
7388 .await
7389 .unwrap()
7390 .into_included()
7391 .unwrap();
7392
7393 assert_eq!(
7394 fs.paths(true),
7395 vec![
7396 PathBuf::from(path!("/")),
7397 PathBuf::from(path!("/one")),
7398 PathBuf::from(path!("/one/two")),
7399 PathBuf::from(path!("/one/two/c.rs")),
7400 PathBuf::from(path!("/one/two/three")),
7401 PathBuf::from(path!("/one/two/three/a.txt")),
7402 PathBuf::from(path!("/one/two/three/b..")),
7403 PathBuf::from(path!("/one/two/three/four")),
7404 ]
7405 );
7406}
7407
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Four fake servers are registered for `tsx`:
    // * TypeScriptServer and TailwindServer advertise hover support and each
    //   return one hover;
    // * ESLintServer advertises hover support but returns `None`;
    // * NoHoverCapabilitiesServer has no hover capability and must never be
    //   asked at all (its handler panics).
    // The project-level hover request should aggregate only the two non-empty
    // responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // One stream of fake-server connections per registered adapter, in the
    // same order as `language_server_names`.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // Deliberately no hover capability for the last server.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install hover handlers on every started server *before* issuing the
    // hover request; the map also doubles as a duplicate-name check.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // Each responds with a hover labeled by its own server name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Capable of hover, but has nothing to say.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be called — panics if hover is ever routed here.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received the request
    // before collecting the aggregated result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7562
7563#[gpui::test]
7564async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7565 init_test(cx);
7566
7567 let fs = FakeFs::new(cx.executor());
7568 fs.insert_tree(
7569 path!("/dir"),
7570 json!({
7571 "a.ts": "a",
7572 }),
7573 )
7574 .await;
7575
7576 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7577
7578 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7579 language_registry.add(typescript_lang());
7580 let mut fake_language_servers = language_registry.register_fake_lsp(
7581 "TypeScript",
7582 FakeLspAdapter {
7583 capabilities: lsp::ServerCapabilities {
7584 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7585 ..lsp::ServerCapabilities::default()
7586 },
7587 ..FakeLspAdapter::default()
7588 },
7589 );
7590
7591 let (buffer, _handle) = project
7592 .update(cx, |p, cx| {
7593 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7594 })
7595 .await
7596 .unwrap();
7597 cx.executor().run_until_parked();
7598
7599 let fake_server = fake_language_servers
7600 .next()
7601 .await
7602 .expect("failed to get the language server");
7603
7604 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7605 move |_, _| async move {
7606 Ok(Some(lsp::Hover {
7607 contents: lsp::HoverContents::Array(vec![
7608 lsp::MarkedString::String("".to_string()),
7609 lsp::MarkedString::String(" ".to_string()),
7610 lsp::MarkedString::String("\n\n\n".to_string()),
7611 ]),
7612 range: None,
7613 }))
7614 },
7615 );
7616
7617 let hover_task = project.update(cx, |project, cx| {
7618 project.hover(&buffer, Point::new(0, 0), cx)
7619 });
7620 let () = request_handled
7621 .next()
7622 .await
7623 .expect("All hover requests should have been triggered");
7624 assert_eq!(
7625 Vec::<String>::new(),
7626 hover_task
7627 .await
7628 .into_iter()
7629 .flatten()
7630 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7631 .sorted()
7632 .collect::<Vec<_>>(),
7633 "Empty hover parts should be ignored"
7634 );
7635}
7636
7637#[gpui::test]
7638async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7639 init_test(cx);
7640
7641 let fs = FakeFs::new(cx.executor());
7642 fs.insert_tree(
7643 path!("/dir"),
7644 json!({
7645 "a.ts": "a",
7646 }),
7647 )
7648 .await;
7649
7650 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7651
7652 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7653 language_registry.add(typescript_lang());
7654 let mut fake_language_servers = language_registry.register_fake_lsp(
7655 "TypeScript",
7656 FakeLspAdapter {
7657 capabilities: lsp::ServerCapabilities {
7658 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7659 ..lsp::ServerCapabilities::default()
7660 },
7661 ..FakeLspAdapter::default()
7662 },
7663 );
7664
7665 let (buffer, _handle) = project
7666 .update(cx, |p, cx| {
7667 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7668 })
7669 .await
7670 .unwrap();
7671 cx.executor().run_until_parked();
7672
7673 let fake_server = fake_language_servers
7674 .next()
7675 .await
7676 .expect("failed to get the language server");
7677
7678 let mut request_handled = fake_server
7679 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7680 Ok(Some(vec![
7681 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7682 title: "organize imports".to_string(),
7683 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7684 ..lsp::CodeAction::default()
7685 }),
7686 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7687 title: "fix code".to_string(),
7688 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7689 ..lsp::CodeAction::default()
7690 }),
7691 ]))
7692 });
7693
7694 let code_actions_task = project.update(cx, |project, cx| {
7695 project.code_actions(
7696 &buffer,
7697 0..buffer.read(cx).len(),
7698 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7699 cx,
7700 )
7701 });
7702
7703 let () = request_handled
7704 .next()
7705 .await
7706 .expect("The code action request should have been triggered");
7707
7708 let code_actions = code_actions_task.await.unwrap().unwrap();
7709 assert_eq!(code_actions.len(), 1);
7710 assert_eq!(
7711 code_actions[0].lsp_action.action_kind(),
7712 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7713 );
7714}
7715
7716#[gpui::test]
7717async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7718 init_test(cx);
7719
7720 let fs = FakeFs::new(cx.executor());
7721 fs.insert_tree(
7722 path!("/dir"),
7723 json!({
7724 "a.tsx": "a",
7725 }),
7726 )
7727 .await;
7728
7729 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7730
7731 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7732 language_registry.add(tsx_lang());
7733 let language_server_names = [
7734 "TypeScriptServer",
7735 "TailwindServer",
7736 "ESLintServer",
7737 "NoActionsCapabilitiesServer",
7738 ];
7739
7740 let mut language_server_rxs = [
7741 language_registry.register_fake_lsp(
7742 "tsx",
7743 FakeLspAdapter {
7744 name: language_server_names[0],
7745 capabilities: lsp::ServerCapabilities {
7746 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7747 ..lsp::ServerCapabilities::default()
7748 },
7749 ..FakeLspAdapter::default()
7750 },
7751 ),
7752 language_registry.register_fake_lsp(
7753 "tsx",
7754 FakeLspAdapter {
7755 name: language_server_names[1],
7756 capabilities: lsp::ServerCapabilities {
7757 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7758 ..lsp::ServerCapabilities::default()
7759 },
7760 ..FakeLspAdapter::default()
7761 },
7762 ),
7763 language_registry.register_fake_lsp(
7764 "tsx",
7765 FakeLspAdapter {
7766 name: language_server_names[2],
7767 capabilities: lsp::ServerCapabilities {
7768 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7769 ..lsp::ServerCapabilities::default()
7770 },
7771 ..FakeLspAdapter::default()
7772 },
7773 ),
7774 language_registry.register_fake_lsp(
7775 "tsx",
7776 FakeLspAdapter {
7777 name: language_server_names[3],
7778 capabilities: lsp::ServerCapabilities {
7779 code_action_provider: None,
7780 ..lsp::ServerCapabilities::default()
7781 },
7782 ..FakeLspAdapter::default()
7783 },
7784 ),
7785 ];
7786
7787 let (buffer, _handle) = project
7788 .update(cx, |p, cx| {
7789 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7790 })
7791 .await
7792 .unwrap();
7793 cx.executor().run_until_parked();
7794
7795 let mut servers_with_actions_requests = HashMap::default();
7796 for i in 0..language_server_names.len() {
7797 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7798 panic!(
7799 "Failed to get language server #{i} with name {}",
7800 &language_server_names[i]
7801 )
7802 });
7803 let new_server_name = new_server.server.name();
7804
7805 assert!(
7806 !servers_with_actions_requests.contains_key(&new_server_name),
7807 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7808 );
7809 match new_server_name.0.as_ref() {
7810 "TailwindServer" | "TypeScriptServer" => {
7811 servers_with_actions_requests.insert(
7812 new_server_name.clone(),
7813 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7814 move |_, _| {
7815 let name = new_server_name.clone();
7816 async move {
7817 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7818 lsp::CodeAction {
7819 title: format!("{name} code action"),
7820 ..lsp::CodeAction::default()
7821 },
7822 )]))
7823 }
7824 },
7825 ),
7826 );
7827 }
7828 "ESLintServer" => {
7829 servers_with_actions_requests.insert(
7830 new_server_name,
7831 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7832 |_, _| async move { Ok(None) },
7833 ),
7834 );
7835 }
7836 "NoActionsCapabilitiesServer" => {
7837 let _never_handled = new_server
7838 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7839 panic!(
7840 "Should not call for code actions server with no corresponding capabilities"
7841 )
7842 });
7843 }
7844 unexpected => panic!("Unexpected server name: {unexpected}"),
7845 }
7846 }
7847
7848 let code_actions_task = project.update(cx, |project, cx| {
7849 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7850 });
7851
7852 // cx.run_until_parked();
7853 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7854 |mut code_actions_request| async move {
7855 code_actions_request
7856 .next()
7857 .await
7858 .expect("All code actions requests should have been triggered")
7859 },
7860 ))
7861 .await;
7862 assert_eq!(
7863 vec!["TailwindServer code action", "TypeScriptServer code action"],
7864 code_actions_task
7865 .await
7866 .unwrap()
7867 .unwrap()
7868 .into_iter()
7869 .map(|code_action| code_action.lsp_action.title().to_owned())
7870 .sorted()
7871 .collect::<Vec<_>>(),
7872 "Should receive code actions responses from all related servers with hover capabilities"
7873 );
7874}
7875
7876#[gpui::test]
7877async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7878 init_test(cx);
7879
7880 let fs = FakeFs::new(cx.executor());
7881 fs.insert_tree(
7882 "/dir",
7883 json!({
7884 "a.rs": "let a = 1;",
7885 "b.rs": "let b = 2;",
7886 "c.rs": "let c = 2;",
7887 }),
7888 )
7889 .await;
7890
7891 let project = Project::test(
7892 fs,
7893 [
7894 "/dir/a.rs".as_ref(),
7895 "/dir/b.rs".as_ref(),
7896 "/dir/c.rs".as_ref(),
7897 ],
7898 cx,
7899 )
7900 .await;
7901
7902 // check the initial state and get the worktrees
7903 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7904 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7905 assert_eq!(worktrees.len(), 3);
7906
7907 let worktree_a = worktrees[0].read(cx);
7908 let worktree_b = worktrees[1].read(cx);
7909 let worktree_c = worktrees[2].read(cx);
7910
7911 // check they start in the right order
7912 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7913 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7914 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7915
7916 (
7917 worktrees[0].clone(),
7918 worktrees[1].clone(),
7919 worktrees[2].clone(),
7920 )
7921 });
7922
7923 // move first worktree to after the second
7924 // [a, b, c] -> [b, a, c]
7925 project
7926 .update(cx, |project, cx| {
7927 let first = worktree_a.read(cx);
7928 let second = worktree_b.read(cx);
7929 project.move_worktree(first.id(), second.id(), cx)
7930 })
7931 .expect("moving first after second");
7932
7933 // check the state after moving
7934 project.update(cx, |project, cx| {
7935 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7936 assert_eq!(worktrees.len(), 3);
7937
7938 let first = worktrees[0].read(cx);
7939 let second = worktrees[1].read(cx);
7940 let third = worktrees[2].read(cx);
7941
7942 // check they are now in the right order
7943 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7944 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7945 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7946 });
7947
7948 // move the second worktree to before the first
7949 // [b, a, c] -> [a, b, c]
7950 project
7951 .update(cx, |project, cx| {
7952 let second = worktree_a.read(cx);
7953 let first = worktree_b.read(cx);
7954 project.move_worktree(first.id(), second.id(), cx)
7955 })
7956 .expect("moving second before first");
7957
7958 // check the state after moving
7959 project.update(cx, |project, cx| {
7960 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7961 assert_eq!(worktrees.len(), 3);
7962
7963 let first = worktrees[0].read(cx);
7964 let second = worktrees[1].read(cx);
7965 let third = worktrees[2].read(cx);
7966
7967 // check they are now in the right order
7968 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7969 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7970 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7971 });
7972
7973 // move the second worktree to after the third
7974 // [a, b, c] -> [a, c, b]
7975 project
7976 .update(cx, |project, cx| {
7977 let second = worktree_b.read(cx);
7978 let third = worktree_c.read(cx);
7979 project.move_worktree(second.id(), third.id(), cx)
7980 })
7981 .expect("moving second after third");
7982
7983 // check the state after moving
7984 project.update(cx, |project, cx| {
7985 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7986 assert_eq!(worktrees.len(), 3);
7987
7988 let first = worktrees[0].read(cx);
7989 let second = worktrees[1].read(cx);
7990 let third = worktrees[2].read(cx);
7991
7992 // check they are now in the right order
7993 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7994 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7995 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7996 });
7997
7998 // move the third worktree to before the second
7999 // [a, c, b] -> [a, b, c]
8000 project
8001 .update(cx, |project, cx| {
8002 let third = worktree_c.read(cx);
8003 let second = worktree_b.read(cx);
8004 project.move_worktree(third.id(), second.id(), cx)
8005 })
8006 .expect("moving third before second");
8007
8008 // check the state after moving
8009 project.update(cx, |project, cx| {
8010 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8011 assert_eq!(worktrees.len(), 3);
8012
8013 let first = worktrees[0].read(cx);
8014 let second = worktrees[1].read(cx);
8015 let third = worktrees[2].read(cx);
8016
8017 // check they are now in the right order
8018 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8019 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8020 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8021 });
8022
8023 // move the first worktree to after the third
8024 // [a, b, c] -> [b, c, a]
8025 project
8026 .update(cx, |project, cx| {
8027 let first = worktree_a.read(cx);
8028 let third = worktree_c.read(cx);
8029 project.move_worktree(first.id(), third.id(), cx)
8030 })
8031 .expect("moving first after third");
8032
8033 // check the state after moving
8034 project.update(cx, |project, cx| {
8035 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8036 assert_eq!(worktrees.len(), 3);
8037
8038 let first = worktrees[0].read(cx);
8039 let second = worktrees[1].read(cx);
8040 let third = worktrees[2].read(cx);
8041
8042 // check they are now in the right order
8043 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8044 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8045 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8046 });
8047
8048 // move the third worktree to before the first
8049 // [b, c, a] -> [a, b, c]
8050 project
8051 .update(cx, |project, cx| {
8052 let third = worktree_a.read(cx);
8053 let first = worktree_b.read(cx);
8054 project.move_worktree(third.id(), first.id(), cx)
8055 })
8056 .expect("moving third before first");
8057
8058 // check the state after moving
8059 project.update(cx, |project, cx| {
8060 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8061 assert_eq!(worktrees.len(), 3);
8062
8063 let first = worktrees[0].read(cx);
8064 let second = worktrees[1].read(cx);
8065 let third = worktrees[2].read(cx);
8066
8067 // check they are now in the right order
8068 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8069 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8070 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8071 });
8072}
8073
/// Verifies that an unstaged diff (working copy vs. git index) reports the
/// correct hunks, and that the hunks are recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index ("staged") version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The working-copy version: one added comment line and one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Wait for the diff to be computed against the index contents.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Expect one added hunk (the comment) and one modified hunk (the println).
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Replace the index contents with a version that already includes the
    // comment but lacks the println; only an "added" hunk should remain.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8167
/// Verifies that an uncommitted diff (working copy vs. HEAD, with the index
/// determining each hunk's secondary "staged" status) tracks changes to both
/// HEAD and the index, including a file that has been deleted on disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is staged, the comment is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the registered Rust language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment is only in the working copy (unstaged => has a secondary
        // hunk); the println change is also in the index (fully staged).
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (by removing it from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8351
/// Verifies the lifecycle of staging hunks: optimistic "pending" state while
/// the index write is in flight, settling to staged when the write succeeds,
/// reverting when the write fails, and the diff-change events emitted at each
/// step.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero" and
    // uppercases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so the emitted change ranges can be checked.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write has not completed yet, so the hunk is "pending".
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistically pending, even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8701
/// Verifies that staging stays consistent when filesystem events for earlier
/// index writes are delayed: further hunks are staged while previous FS events
/// are still buffered, and all hunks end up staged once the events flush.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: deletion of "zero" plus two
    // modified lines, producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With events paused, the hunk can only be optimistically pending.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are still pending; the first one's event has not
        // been delivered yet.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8895
/// Randomized test: repeatedly stages/unstages random hunks with random
/// yields in between, then checks that every hunk settles into the state
/// predicted by a simple model (pending -> settled) once all IO completes.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of each hunk's expected secondary status.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage if it's unstaged, unstage otherwise, and
        // record the expected in-flight state in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let pending IO interleave unpredictably with subsequent operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state resolves to its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9015
9016#[gpui::test]
9017async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9018 init_test(cx);
9019
9020 let committed_contents = r#"
9021 fn main() {
9022 println!("hello from HEAD");
9023 }
9024 "#
9025 .unindent();
9026 let file_contents = r#"
9027 fn main() {
9028 println!("hello from the working copy");
9029 }
9030 "#
9031 .unindent();
9032
9033 let fs = FakeFs::new(cx.background_executor.clone());
9034 fs.insert_tree(
9035 "/dir",
9036 json!({
9037 ".git": {},
9038 "src": {
9039 "main.rs": file_contents,
9040 }
9041 }),
9042 )
9043 .await;
9044
9045 fs.set_head_for_repo(
9046 Path::new("/dir/.git"),
9047 &[("src/main.rs", committed_contents.clone())],
9048 "deadbeef",
9049 );
9050 fs.set_index_for_repo(
9051 Path::new("/dir/.git"),
9052 &[("src/main.rs", committed_contents.clone())],
9053 );
9054
9055 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9056
9057 let buffer = project
9058 .update(cx, |project, cx| {
9059 project.open_local_buffer("/dir/src/main.rs", cx)
9060 })
9061 .await
9062 .unwrap();
9063 let uncommitted_diff = project
9064 .update(cx, |project, cx| {
9065 project.open_uncommitted_diff(buffer.clone(), cx)
9066 })
9067 .await
9068 .unwrap();
9069
9070 cx.run_until_parked();
9071 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9072 let snapshot = buffer.read(cx).snapshot();
9073 assert_hunks(
9074 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9075 &snapshot,
9076 &uncommitted_diff.base_text_string(cx).unwrap(),
9077 &[(
9078 1..2,
9079 " println!(\"hello from HEAD\");\n",
9080 " println!(\"hello from the working copy\");\n",
9081 DiffHunkStatus {
9082 kind: DiffHunkStatusKind::Modified,
9083 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9084 },
9085 )],
9086 );
9087 });
9088}
9089
// TODO: Should we test this on Windows also?
/// Verifies, against a real git repository on the real filesystem, that
/// staging a hunk does not clobber the file's executable bit in the index.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + subprocess git below require parking the test executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` as an executable file, then modify it in the working copy.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which rewrites the file's index entry.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // A mode change would show up in `git diff --staged` as "new mode 100644".
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry itself still records mode 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9174
/// Verifies that `GitStore::repository_and_path_for_project_path` resolves a
/// project path to the innermost containing repository (including a nested
/// repository under `deps/`), returns `None` for paths outside any
/// repository, and stops resolving once a repository's `.git` directory is
/// removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project path, expected (repo work dir, repo-relative path)).
        // `c.txt` lies outside both repos; `a.txt` must resolve to the nested
        // `dep1` repository rather than the enclosing `dir1` one.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` dir should make paths that
    // previously resolved to it resolve to nothing.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9264
9265#[gpui::test]
9266async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9267 init_test(cx);
9268 let fs = FakeFs::new(cx.background_executor.clone());
9269 let home = paths::home_dir();
9270 fs.insert_tree(
9271 home,
9272 json!({
9273 ".git": {},
9274 "project": {
9275 "a.txt": "A"
9276 },
9277 }),
9278 )
9279 .await;
9280
9281 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9282 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9283 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9284
9285 project
9286 .update(cx, |project, cx| project.git_scans_complete(cx))
9287 .await;
9288 tree.flush_fs_events(cx).await;
9289
9290 project.read_with(cx, |project, cx| {
9291 let containing = project
9292 .git_store()
9293 .read(cx)
9294 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9295 assert!(containing.is_none());
9296 });
9297
9298 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9299 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9300 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9301 project
9302 .update(cx, |project, cx| project.git_scans_complete(cx))
9303 .await;
9304 tree.flush_fs_events(cx).await;
9305
9306 project.read_with(cx, |project, cx| {
9307 let containing = project
9308 .git_store()
9309 .read(cx)
9310 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9311 assert_eq!(
9312 containing
9313 .unwrap()
9314 .0
9315 .read(cx)
9316 .work_directory_abs_path
9317 .as_ref(),
9318 home,
9319 );
9320 });
9321}
9322
/// End-to-end status tracking against a real git repository: verifies the
/// statuses observed on startup (modified / untracked / deleted), after a
/// subsequent worktree edit, and after committing the changes and deleting
/// both a tracked and an untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses annotated above: delete d.txt, modify a.txt.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; it should gain a modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes (and remove d.txt from the index).
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9452
/// Checks status post-processing:
/// - a nested git repository (`sub`) is excluded from the outer repository's
///   computed statuses;
/// - a file present in HEAD and the worktree but removed from the index gets
///   the combined status index=Deleted, worktree=Added ("DA").
///
/// NOTE(review): marked `#[ignore]`; the reason isn't recorded here — confirm
/// before re-enabling.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9516
9517#[track_caller]
9518/// We merge lhs into rhs.
9519fn merge_pending_ops_snapshots(
9520 source: Vec<pending_op::PendingOps>,
9521 mut target: Vec<pending_op::PendingOps>,
9522) -> Vec<pending_op::PendingOps> {
9523 for s_ops in source {
9524 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9525 if ops.repo_path == s_ops.repo_path {
9526 Some(idx)
9527 } else {
9528 None
9529 }
9530 }) {
9531 let t_ops = &mut target[idx];
9532 for s_op in s_ops.ops {
9533 if let Some(op_idx) = t_ops
9534 .ops
9535 .iter()
9536 .zip(0..)
9537 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9538 {
9539 let t_op = &mut t_ops.ops[op_idx];
9540 match (s_op.job_status, t_op.job_status) {
9541 (pending_op::JobStatus::Running, _) => {}
9542 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9543 (s_st, t_st) if s_st == t_st => {}
9544 _ => unreachable!(),
9545 }
9546 } else {
9547 t_ops.ops.push(s_op);
9548 }
9549 }
9550 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9551 } else {
9552 target.push(s_ops);
9553 }
9554 }
9555 target
9556}
9557
/// Exercises pending-op bookkeeping for repeated stage/unstage of a single
/// untracked file: each call appends a `PendingOp` with a monotonically
/// increasing id that transitions `Running` -> `Finished`, every op is
/// observable via `PendingOpsChanged` events, and the final cached status
/// reflects the last operation (a stage, so index=Added).
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next op; incremented after each stage/unstage below.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the newest pending op is
    // `Running` while the task is in flight and `Finished` once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending on a stage.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history records all five ops, in id order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9718
/// When the same file is staged twice in quick succession (the first task
/// detached, the second awaited), the event history shows the first op ending
/// as `Skipped` and the second as `Finished`, with the file staged at the end.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot so the op history can be
    // asserted after both staging attempts settle.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, left to run in the background.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path: awaited (with a timeout so a
    // hang fails the test rather than blocking forever).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The superseded first op is Skipped; the second completes normally.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Net effect: the previously-untracked file is now added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9824
/// Pending-op bookkeeping for `stage_all` / `unstage_all` across two
/// untracked files: after an explicit stage of `a.txt`, a `stage_all`, and an
/// `unstage_all`, each path's history shows one finished Staged op and one
/// finished Unstaged op, and both files end up untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // per-path op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // Everything was unstaged last, so both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9953
/// Opening a project rooted in a subfolder of a repository must still
/// discover the repository at its true root, report statuses keyed by
/// repo-root-relative paths, and pick up subsequent status changes.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths relative to the repository root, not the project root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the project deep inside the repository.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repo's work directory is its true root, above the project root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should clear the cached ones too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10033
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is never true, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
/// A conflicted cherry-pick should surface the conflicted path in
/// `repository.merge_conflicts`, and resolving it (add + commit + removing
/// CHERRY_PICK_HEAD) should clear the conflict list.
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch that edits the same line.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Back on main, make a conflicting change, then cherry-pick the commit.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10116
/// Editing `.gitignore` must be picked up live: a file that stops being
/// ignored can be staged (status Added), while a file that becomes ignored
/// loses its status and is marked ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The roles flip: a.xml is now ignored, b.txt is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10184
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Renaming a repository's work directory on disk must update the
/// repository's `work_directory_abs_path` while preserving the cached
/// per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it; `b` remains untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Baseline: statuses observed under the original work directory.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The work directory path follows the rename; the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10266
10267// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
10268// you can't rename a directory which some program has already open. This is a
10269// limitation of the Windows. See:
10270// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10271// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check that git file statuses (untracked / modified / clean)
    // stay up to date as files are modified, committed, stashed, ignored,
    // deleted, and moved within a real on-disk repository.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added to the index, so they are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are committed and unmodified now, so neither
        // reports any status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed, so it reads as clean again.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // b.txt was removed from the index, making it untracked once more.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // `renamed_dir_name` is reassigned further below after the directory move.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // A new file in a freshly created directory is reported as untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // After renaming the parent directory, the status follows the new path.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10491
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    // Verifies that FS churn inside a gitignored directory (`target/`)
    // produces worktree entry updates only for already-loaded parents, and
    // never produces repository status update events.
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository update events and worktree entry updates for the
    // assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-harness noise, not a real path.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces that subtree to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate build tooling churning inside the ignored directory: create a
    // nested dir, write a temp file into it, then remove the whole thing.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10653
10654// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10655// to different timings/ordering of events.
10656#[ignore]
10657#[gpui::test]
10658async fn test_odd_events_for_ignored_dirs(
10659 executor: BackgroundExecutor,
10660 cx: &mut gpui::TestAppContext,
10661) {
10662 init_test(cx);
10663 let fs = FakeFs::new(executor);
10664 fs.insert_tree(
10665 path!("/root"),
10666 json!({
10667 ".git": {},
10668 ".gitignore": "**/target/",
10669 "src": {
10670 "main.rs": "fn main() {}",
10671 },
10672 "target": {
10673 "debug": {
10674 "foo.txt": "foo",
10675 "deps": {}
10676 }
10677 }
10678 }),
10679 )
10680 .await;
10681 fs.set_head_and_index_for_repo(
10682 path!("/root/.git").as_ref(),
10683 &[
10684 (".gitignore", "**/target/".into()),
10685 ("src/main.rs", "fn main() {}".into()),
10686 ],
10687 );
10688
10689 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10690 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10691 let project_events = Arc::new(Mutex::new(Vec::new()));
10692 project.update(cx, |project, cx| {
10693 let repository_updates = repository_updates.clone();
10694 cx.subscribe(project.git_store(), move |_, _, e, _| {
10695 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10696 repository_updates.lock().push(e.clone());
10697 }
10698 })
10699 .detach();
10700 let project_events = project_events.clone();
10701 cx.subscribe_self(move |_, e, _| {
10702 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10703 project_events.lock().extend(
10704 updates
10705 .iter()
10706 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10707 .filter(|(path, _)| path != "fs-event-sentinel"),
10708 );
10709 }
10710 })
10711 .detach();
10712 });
10713
10714 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10715 tree.update(cx, |tree, cx| {
10716 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10717 })
10718 .await
10719 .unwrap();
10720 tree.flush_fs_events(cx).await;
10721 project
10722 .update(cx, |project, cx| project.git_scans_complete(cx))
10723 .await;
10724 cx.run_until_parked();
10725 tree.update(cx, |tree, _| {
10726 assert_eq!(
10727 tree.entries(true, 0)
10728 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10729 .collect::<Vec<_>>(),
10730 vec![
10731 (rel_path(""), false),
10732 (rel_path(".gitignore"), false),
10733 (rel_path("src"), false),
10734 (rel_path("src/main.rs"), false),
10735 (rel_path("target"), true),
10736 (rel_path("target/debug"), true),
10737 (rel_path("target/debug/deps"), true),
10738 (rel_path("target/debug/foo.txt"), true),
10739 ]
10740 );
10741 });
10742
10743 assert_eq!(
10744 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10745 vec![
10746 RepositoryEvent::MergeHeadsChanged,
10747 RepositoryEvent::BranchChanged,
10748 RepositoryEvent::StatusesChanged,
10749 RepositoryEvent::StatusesChanged,
10750 ],
10751 "Initial worktree scan should produce a repo update event"
10752 );
10753 assert_eq!(
10754 project_events.lock().drain(..).collect::<Vec<_>>(),
10755 vec![
10756 ("target".to_string(), PathChange::Loaded),
10757 ("target/debug".to_string(), PathChange::Loaded),
10758 ("target/debug/deps".to_string(), PathChange::Loaded),
10759 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10760 ],
10761 "All non-ignored entries and all opened firs should be getting a project event",
10762 );
10763
10764 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10765 // This may happen multiple times during a single flycheck, but once is enough for testing.
10766 fs.emit_fs_event("/root/target/debug/deps", None);
10767 tree.flush_fs_events(cx).await;
10768 project
10769 .update(cx, |project, cx| project.git_scans_complete(cx))
10770 .await;
10771 cx.executor().run_until_parked();
10772
10773 assert_eq!(
10774 repository_updates
10775 .lock()
10776 .iter()
10777 .cloned()
10778 .collect::<Vec<_>>(),
10779 Vec::new(),
10780 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10781 );
10782 assert_eq!(
10783 project_events.lock().as_slice(),
10784 Vec::new(),
10785 "No further project events should happen, as only ignored dirs received FS events",
10786 );
10787}
10788
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Checks that adding an invisible (single-file) worktree does not cause
    // repositories outside the visible worktree to be registered.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only dep1's repository should be listed; dir1's repository lies outside
    // the visible worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open `b.txt` through a new invisible worktree rooted at the file.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list should be unchanged — dir1's repo stays invisible.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10850
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Checks that git status and ignore state stay correct for files created
    // in tracked, ancestor-ignored, and repo-ignored locations.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Disable default exclusions so ignored entries are observable.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so their ignore
    // state can be asserted below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one file of each kind: staged, ancestor-ignored, repo-ignored.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file shows as Added; ignored files show no status.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself must be treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10991
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Checks that linked git worktrees (`.git` file pointing at
    // `.git/worktrees/...`) and submodules (`.git` file pointing at
    // `.git/modules/...`) are discovered as distinct repositories, and that
    // each one's statuses refresh when its git state changes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered:
    // main repo, linked worktree, and submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer must resolve to the linked worktree's repository,
        // not the main one.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // The barrier resolves once the repository's pending work completes.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11147
11148#[gpui::test]
11149async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
11150 init_test(cx);
11151 let fs = FakeFs::new(cx.background_executor.clone());
11152 fs.insert_tree(
11153 path!("/root"),
11154 json!({
11155 "project": {
11156 ".git": {},
11157 "child1": {
11158 "a.txt": "A",
11159 },
11160 "child2": {
11161 "b.txt": "B",
11162 }
11163 }
11164 }),
11165 )
11166 .await;
11167
11168 let project = Project::test(
11169 fs.clone(),
11170 [
11171 path!("/root/project/child1").as_ref(),
11172 path!("/root/project/child2").as_ref(),
11173 ],
11174 cx,
11175 )
11176 .await;
11177
11178 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11179 tree.flush_fs_events(cx).await;
11180 project
11181 .update(cx, |project, cx| project.git_scans_complete(cx))
11182 .await;
11183 cx.executor().run_until_parked();
11184
11185 let repos = project.read_with(cx, |project, cx| {
11186 project
11187 .repositories(cx)
11188 .values()
11189 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11190 .collect::<Vec<_>>()
11191 });
11192 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11193}
11194
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // Verifies that saving a buffer under a new path re-bases its unstaged
    // and uncommitted diffs against the new path's index/HEAD contents,
    // driven by the `BufferChangedFilePath` event.
    init_test(cx);

    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Give HEAD and the index distinct contents so the staged vs. committed
    // diff bases are distinguishable in the assertions below.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the buffer contents so the diff has something to show.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11308
11309async fn search(
11310 project: &Entity<Project>,
11311 query: SearchQuery,
11312 cx: &mut gpui::TestAppContext,
11313) -> Result<HashMap<String, Vec<Range<usize>>>> {
11314 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11315 let mut results = HashMap::default();
11316 while let Ok(search_result) = search_rx.rx.recv().await {
11317 match search_result {
11318 SearchResult::Buffer { buffer, ranges } => {
11319 results.entry(buffer).or_insert(ranges);
11320 }
11321 SearchResult::LimitReached => {}
11322 }
11323 }
11324 Ok(results
11325 .into_iter()
11326 .map(|(buffer, ranges)| {
11327 buffer.update(cx, |buffer, cx| {
11328 let path = buffer
11329 .file()
11330 .unwrap()
11331 .full_path(cx)
11332 .to_string_lossy()
11333 .to_string();
11334 let ranges = ranges
11335 .into_iter()
11336 .map(|range| range.to_offset(buffer))
11337 .collect::<Vec<_>>();
11338 (path, ranges)
11339 })
11340 })
11341 .collect())
11342}
11343
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    // Verifies that reloading a buffer with a different encoding participates
    // in undo history: undo restores the previous encoding and text, redo
    // re-applies the new ones, and the buffer never becomes dirty.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // Bytes 0x48 0x69 read as one little-endian UTF-16 code unit: U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo should re-apply the UTF-16LE interpretation.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11407
11408pub fn init_test(cx: &mut gpui::TestAppContext) {
11409 zlog::init_test();
11410
11411 cx.update(|cx| {
11412 let settings_store = SettingsStore::test(cx);
11413 cx.set_global(settings_store);
11414 release_channel::init(semver::Version::new(0, 0, 0), cx);
11415 });
11416}
11417
11418fn json_lang() -> Arc<Language> {
11419 Arc::new(Language::new(
11420 LanguageConfig {
11421 name: "JSON".into(),
11422 matcher: LanguageMatcher {
11423 path_suffixes: vec!["json".to_string()],
11424 ..Default::default()
11425 },
11426 ..Default::default()
11427 },
11428 None,
11429 ))
11430}
11431
11432fn js_lang() -> Arc<Language> {
11433 Arc::new(Language::new(
11434 LanguageConfig {
11435 name: "JavaScript".into(),
11436 matcher: LanguageMatcher {
11437 path_suffixes: vec!["js".to_string()],
11438 ..Default::default()
11439 },
11440 ..Default::default()
11441 },
11442 None,
11443 ))
11444}
11445
/// Builds a fake "Python" language (no grammar) whose toolchain lister
/// reports a virtual environment for each ancestor directory (as yielded by
/// `RelPath::ancestors`) of the queried path that contains a `.venv`
/// directory on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // "Moot" lister: purely filesystem-driven, no real Python probing.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain for every ancestor of
            // `subroot_relative_path` that has a `.venv` directory beneath
            // the worktree root.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is deliberately unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchains require no shell activation commands.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11520
11521fn typescript_lang() -> Arc<Language> {
11522 Arc::new(Language::new(
11523 LanguageConfig {
11524 name: "TypeScript".into(),
11525 matcher: LanguageMatcher {
11526 path_suffixes: vec!["ts".to_string()],
11527 ..Default::default()
11528 },
11529 ..Default::default()
11530 },
11531 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11532 ))
11533}
11534
11535fn tsx_lang() -> Arc<Language> {
11536 Arc::new(Language::new(
11537 LanguageConfig {
11538 name: "tsx".into(),
11539 matcher: LanguageMatcher {
11540 path_suffixes: vec!["tsx".to_string()],
11541 ..Default::default()
11542 },
11543 ..Default::default()
11544 },
11545 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11546 ))
11547}
11548
11549fn get_all_tasks(
11550 project: &Entity<Project>,
11551 task_contexts: Arc<TaskContexts>,
11552 cx: &mut App,
11553) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11554 let new_tasks = project.update(cx, |project, cx| {
11555 project.task_store().update(cx, |task_store, cx| {
11556 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11557 this.used_and_current_resolved_tasks(task_contexts, cx)
11558 })
11559 })
11560 });
11561
11562 cx.background_spawn(async move {
11563 let (mut old, new) = new_tasks.await;
11564 old.extend(new);
11565 old
11566 })
11567}
11568
11569#[track_caller]
11570fn assert_entry_git_state(
11571 tree: &Worktree,
11572 repository: &Repository,
11573 path: &str,
11574 index_status: Option<StatusCode>,
11575 is_ignored: bool,
11576) {
11577 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11578 let entry = tree
11579 .entry_for_path(&rel_path(path))
11580 .unwrap_or_else(|| panic!("entry {path} not found"));
11581 let status = repository
11582 .status_for_path(&repo_path(path))
11583 .map(|entry| entry.status);
11584 let expected = index_status.map(|index_status| {
11585 TrackedStatus {
11586 index_status,
11587 worktree_status: StatusCode::Unmodified,
11588 }
11589 .into()
11590 });
11591 assert_eq!(
11592 status, expected,
11593 "expected {path} to have git status: {expected:?}"
11594 );
11595 assert_eq!(
11596 entry.is_ignored, is_ignored,
11597 "expected {path} to have is_ignored: {is_ignored}"
11598 );
11599}
11600
11601#[track_caller]
11602fn git_init(path: &Path) -> git2::Repository {
11603 let mut init_opts = RepositoryInitOptions::new();
11604 init_opts.initial_head("main");
11605 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11606}
11607
11608#[track_caller]
11609fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11610 let path = path.as_ref();
11611 let mut index = repo.index().expect("Failed to get index");
11612 index.add_path(path).expect("Failed to add file");
11613 index.write().expect("Failed to write index");
11614}
11615
11616#[track_caller]
11617fn git_remove_index(path: &Path, repo: &git2::Repository) {
11618 let mut index = repo.index().expect("Failed to get index");
11619 index.remove_path(path).expect("Failed to add file");
11620 index.write().expect("Failed to write index");
11621}
11622
11623#[track_caller]
11624fn git_commit(msg: &'static str, repo: &git2::Repository) {
11625 use git2::Signature;
11626
11627 let signature = Signature::now("test", "test@zed.dev").unwrap();
11628 let oid = repo.index().unwrap().write_tree().unwrap();
11629 let tree = repo.find_tree(oid).unwrap();
11630 if let Ok(head) = repo.head() {
11631 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11632
11633 let parent_commit = parent_obj.as_commit().unwrap();
11634
11635 repo.commit(
11636 Some("HEAD"),
11637 &signature,
11638 &signature,
11639 msg,
11640 &tree,
11641 &[parent_commit],
11642 )
11643 .expect("Failed to commit with parent");
11644 } else {
11645 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11646 .expect("Failed to commit");
11647 }
11648}
11649
// Cherry-picks `commit` onto the current HEAD. Currently compiled out via
// `cfg(any())`; kept around for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11655
11656#[track_caller]
11657fn git_stash(repo: &mut git2::Repository) {
11658 use git2::Signature;
11659
11660 let signature = Signature::now("test", "test@zed.dev").unwrap();
11661 repo.stash_save(&signature, "N/A", None)
11662 .expect("Failed to stash");
11663}
11664
11665#[track_caller]
11666fn git_reset(offset: usize, repo: &git2::Repository) {
11667 let head = repo.head().expect("Couldn't get repo head");
11668 let object = head.peel(git2::ObjectType::Commit).unwrap();
11669 let commit = object.as_commit().unwrap();
11670 let new_head = commit
11671 .parents()
11672 .inspect(|parnet| {
11673 parnet.message();
11674 })
11675 .nth(offset)
11676 .expect("Not enough history");
11677 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11678 .expect("Could not reset");
11679}
11680
/// Creates a branch named `name` pointing at the current HEAD commit.
/// Currently compiled out via `cfg(any())`; kept around for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11691
// Points HEAD at the ref named `name` and checks it out into the working
// tree. Currently compiled out via `cfg(any())`; kept around for future
// tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11698
/// Returns a snapshot of the repository's status entries, keyed by path.
/// Currently compiled out via `cfg(any())`; kept around for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
11708
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths: it should resolve
    // paths inside any worktree (existing or not) to a (worktree_id,
    // relative path) pair, and return None for paths outside every worktree.
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees rooted at project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves with its worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11792
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies that removing worktrees keeps the git store consistent:
    // removing a worktree nested inside a repo's work directory must not
    // drop the repository (another worktree still covers it), and the
    // active repository falls over to a remaining one (or None) as
    // worktrees are removed.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees; /root/b/script lies inside the /root/b repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories exist (/root/a and /root/b); the script
    // worktree belongs to the /root/b repo.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Remove the nested script worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    // Both repositories survive: /root/b is still covered by its own
    // worktree.
    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing /root/a should switch the active repository to /root/b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the final worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11905
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies the optimistic-staging lifecycle of a diff hunk:
    // unstaged (HasSecondaryHunk) -> staging in flight
    // (SecondaryHunkRemovalPending) -> staged (NoSecondaryHunk) -> gone
    // once HEAD matches the file contents.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the working-tree
    // change ("two" -> "TWO") is entirely unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // Stop ticking once the optimistic "removal pending" state is
        // observed; reaching NoSecondaryHunk here would mean we never saw
        // the intermediate state.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12050
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that buffers whose paths match the `read_only_files` glob
    // patterns open as read-only, while other buffers stay writable.
    init_test(cx);

    // Configure read_only_files setting before creating the project so the
    // patterns apply when the buffers are opened.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12126
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that an explicitly empty `read_only_files` list marks no
    // buffers read-only, even ones that look auto-generated.
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12185
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    // Verifies `read_only_files` patterns targeting lock files: the lock
    // files open read-only while their sibling manifests stay writable.
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12264
12265mod disable_ai_settings_tests {
12266 use gpui::TestAppContext;
12267 use project::*;
12268 use settings::{Settings, SettingsStore};
12269
12270 #[gpui::test]
12271 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12272 cx.update(|cx| {
12273 settings::init(cx);
12274
12275 // Test 1: Default is false (AI enabled)
12276 assert!(
12277 !DisableAiSettings::get_global(cx).disable_ai,
12278 "Default should allow AI"
12279 );
12280 });
12281
12282 let disable_true = serde_json::json!({
12283 "disable_ai": true
12284 })
12285 .to_string();
12286 let disable_false = serde_json::json!({
12287 "disable_ai": false
12288 })
12289 .to_string();
12290
12291 cx.update_global::<SettingsStore, _>(|store, cx| {
12292 store.set_user_settings(&disable_false, cx).unwrap();
12293 store.set_global_settings(&disable_true, cx).unwrap();
12294 });
12295 cx.update(|cx| {
12296 assert!(
12297 DisableAiSettings::get_global(cx).disable_ai,
12298 "Local false cannot override global true"
12299 );
12300 });
12301
12302 cx.update_global::<SettingsStore, _>(|store, cx| {
12303 store.set_global_settings(&disable_false, cx).unwrap();
12304 store.set_user_settings(&disable_true, cx).unwrap();
12305 });
12306
12307 cx.update(|cx| {
12308 assert!(
12309 DisableAiSettings::get_global(cx).disable_ai,
12310 "Local false cannot override global true"
12311 );
12312 });
12313 }
12314
12315 #[gpui::test]
12316 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12317 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12318 use worktree::WorktreeId;
12319
12320 cx.update(|cx| {
12321 settings::init(cx);
12322
12323 // Default should allow AI
12324 assert!(
12325 !DisableAiSettings::get_global(cx).disable_ai,
12326 "Default should allow AI"
12327 );
12328 });
12329
12330 let worktree_id = WorktreeId::from_usize(1);
12331 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12332 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12333 };
12334 let project_path = rel_path("project");
12335 let settings_location = SettingsLocation {
12336 worktree_id,
12337 path: project_path.as_ref(),
12338 };
12339
12340 // Test: Project-level disable_ai=true should disable AI for files in that project
12341 cx.update_global::<SettingsStore, _>(|store, cx| {
12342 store
12343 .set_local_settings(
12344 worktree_id,
12345 LocalSettingsPath::InWorktree(project_path.clone()),
12346 LocalSettingsKind::Settings,
12347 Some(r#"{ "disable_ai": true }"#),
12348 cx,
12349 )
12350 .unwrap();
12351 });
12352
12353 cx.update(|cx| {
12354 let settings = DisableAiSettings::get(Some(settings_location), cx);
12355 assert!(
12356 settings.disable_ai,
12357 "Project-level disable_ai=true should disable AI for files in that project"
12358 );
12359 // Global should now also be true since project-level disable_ai is merged into global
12360 assert!(
12361 DisableAiSettings::get_global(cx).disable_ai,
12362 "Global setting should be affected by project-level disable_ai=true"
12363 );
12364 });
12365
12366 // Test: Setting project-level to false should allow AI for that project
12367 cx.update_global::<SettingsStore, _>(|store, cx| {
12368 store
12369 .set_local_settings(
12370 worktree_id,
12371 LocalSettingsPath::InWorktree(project_path.clone()),
12372 LocalSettingsKind::Settings,
12373 Some(r#"{ "disable_ai": false }"#),
12374 cx,
12375 )
12376 .unwrap();
12377 });
12378
12379 cx.update(|cx| {
12380 let settings = DisableAiSettings::get(Some(settings_location), cx);
12381 assert!(
12382 !settings.disable_ai,
12383 "Project-level disable_ai=false should allow AI"
12384 );
12385 // Global should also be false now
12386 assert!(
12387 !DisableAiSettings::get_global(cx).disable_ai,
12388 "Global setting should be false when project-level is false"
12389 );
12390 });
12391
12392 // Test: User-level true + project-level false = AI disabled (saturation)
12393 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12394 cx.update_global::<SettingsStore, _>(|store, cx| {
12395 store.set_user_settings(&disable_true, cx).unwrap();
12396 store
12397 .set_local_settings(
12398 worktree_id,
12399 LocalSettingsPath::InWorktree(project_path.clone()),
12400 LocalSettingsKind::Settings,
12401 Some(r#"{ "disable_ai": false }"#),
12402 cx,
12403 )
12404 .unwrap();
12405 });
12406
12407 cx.update(|cx| {
12408 let settings = DisableAiSettings::get(Some(settings_location), cx);
12409 assert!(
12410 settings.disable_ai,
12411 "Project-level false cannot override user-level true (SaturatingBool)"
12412 );
12413 });
12414 }
12415}